diff --git a/README.md b/README.md index 8fefeb2f..a85761e1 100644 --- a/README.md +++ b/README.md @@ -39,6 +39,7 @@ The main features of GQL are: * Supports GraphQL queries, mutations and [subscriptions](https://gql.readthedocs.io/en/latest/usage/subscriptions.html) * Supports [sync or async usage](https://gql.readthedocs.io/en/latest/async/index.html), [allowing concurrent requests](https://gql.readthedocs.io/en/latest/advanced/async_advanced_usage.html#async-advanced-usage) * Supports [File uploads](https://gql.readthedocs.io/en/latest/usage/file_upload.html) +* Supports [Custom scalars / Enums](https://gql.readthedocs.io/en/latest/usage/custom_scalars_and_enums.html) * [gql-cli script](https://gql.readthedocs.io/en/latest/gql-cli/intro.html) to execute GraphQL queries from the command line * [DSL module](https://gql.readthedocs.io/en/latest/advanced/dsl_module.html) to compose GraphQL queries dynamically diff --git a/docs/usage/custom_scalars.rst b/docs/usage/custom_scalars.rst deleted file mode 100644 index baee441e..00000000 --- a/docs/usage/custom_scalars.rst +++ /dev/null @@ -1,134 +0,0 @@ -Custom Scalars -============== - -Scalar types represent primitive values at the leaves of a query. - -GraphQL provides a number of built-in scalars (Int, Float, String, Boolean and ID), but a GraphQL backend -can add additional custom scalars to its schema to better express values in their data model. - -For example, a schema can define the Datetime scalar to represent an ISO-8601 encoded date. - -The schema will then only contain: - -.. code-block:: python - - scalar Datetime - -When custom scalars are sent to the backend (as inputs) or from the backend (as outputs), -their values need to be serialized to be composed -of only built-in scalars, then at the destination the serialized values will be parsed again to -be able to represent the scalar in its local internal representation. - -Because this serialization/unserialization is dependent on the language used at both sides, it is not -described in the schema and needs to be defined independently at both sides (client, backend). - -A custom scalar value can have two different representations during its transport: - - - as a serialized value (usually as json): - - * in the results sent by the backend - * in the variables sent by the client alongside the query - - - as "literal" inside the query itself sent by the client - -To define a custom scalar, you need 3 methods: - - - a :code:`serialize` method used: - - * by the backend to serialize a custom scalar output in the result - * by the client to serialize a custom scalar input in the variables - - - a :code:`parse_value` method used: - - * by the backend to unserialize custom scalars inputs in the variables sent by the client - * by the client to unserialize custom scalars outputs from the results - - - a :code:`parse_literal` method used: - - * by the backend to unserialize custom scalars inputs inside the query itself - -To define a custom scalar object, we define a :code:`GraphQLScalarType` from graphql-core with -its name and the implementation of the above methods. - -Example for Datetime: - -.. 
code-block:: python - - from datetime import datetime - from typing import Any, Dict, Optional - - from graphql import GraphQLScalarType, ValueNode - from graphql.utilities import value_from_ast_untyped - - - def serialize_datetime(value: Any) -> str: - return value.isoformat() - - - def parse_datetime_value(value: Any) -> datetime: - return datetime.fromisoformat(value) - - - def parse_datetime_literal( - value_node: ValueNode, variables: Optional[Dict[str, Any]] = None - ) -> datetime: - ast_value = value_from_ast_untyped(value_node, variables) - return parse_datetime_value(ast_value) - - - DatetimeScalar = GraphQLScalarType( - name="Datetime", - serialize=serialize_datetime, - parse_value=parse_datetime_value, - parse_literal=parse_datetime_literal, - ) - -Custom Scalars in inputs ------------------------- - -To provide custom scalars in input with gql, you can: - -- serialize the scalar yourself as "literal" in the query: - -.. code-block:: python - - query = gql( - """{ - shiftDays(time: "2021-11-12T11:58:13.461161", days: 5) - }""" - ) - -- serialize the scalar yourself in a variable: - -.. code-block:: python - - query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") - - variable_values = { - "time": "2021-11-12T11:58:13.461161", - } - - result = client.execute(query, variable_values=variable_values) - -- add a custom scalar to the schema with :func:`update_schema_scalars ` - and execute the query with :code:`serialize_variables=True` - and gql will serialize the variable values from a Python object representation. - -For this, you need to provide a schema or set :code:`fetch_schema_from_transport=True` -in the client to request the schema from the backend. - -.. code-block:: python - - from gql.utilities import update_schema_scalars - - async with Client(transport=transport, fetch_schema_from_transport=True) as session: - - update_schema_scalars(session.client.schema, [DatetimeScalar]) - - query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") - - variable_values = {"time": datetime.now()} - - result = await session.execute( - query, variable_values=variable_values, serialize_variables=True - ) diff --git a/docs/usage/custom_scalars_and_enums.rst b/docs/usage/custom_scalars_and_enums.rst new file mode 100644 index 00000000..fc9008d8 --- /dev/null +++ b/docs/usage/custom_scalars_and_enums.rst @@ -0,0 +1,333 @@ +Custom scalars and enums +======================== + +.. _custom_scalars: + +Custom scalars +-------------- + +Scalar types represent primitive values at the leaves of a query. + +GraphQL provides a number of built-in scalars (Int, Float, String, Boolean and ID), but a GraphQL backend +can add additional custom scalars to its schema to better express values in their data model. + +For example, a schema can define the Datetime scalar to represent an ISO-8601 encoded date. + +The schema will then only contain:: + + scalar Datetime + +When custom scalars are sent to the backend (as inputs) or from the backend (as outputs), +their values need to be serialized to be composed +of only built-in scalars, then at the destination the serialized values will be parsed again to +be able to represent the scalar in its local internal representation. + +Because this serialization/unserialization is dependent on the language used at both sides, it is not +described in the schema and needs to be defined independently at both sides (client, backend). 
+
+A custom scalar value can have two different representations during its transport:
+
+  - as a serialized value (usually as JSON):
+
+    * in the results sent by the backend
+    * in the variables sent by the client alongside the query
+
+  - as "literal" inside the query itself sent by the client
+
+To define a custom scalar, you need 3 methods:
+
+  - a :code:`serialize` method used:
+
+    * by the backend to serialize a custom scalar output in the result
+    * by the client to serialize a custom scalar input in the variables
+
+  - a :code:`parse_value` method used:
+
+    * by the backend to unserialize custom scalar inputs in the variables sent by the client
+    * by the client to unserialize custom scalar outputs from the results
+
+  - a :code:`parse_literal` method used:
+
+    * by the backend to unserialize custom scalar inputs inside the query itself
+
+To define a custom scalar object, graphql-core provides the :code:`GraphQLScalarType` class
+which contains the implementation of the above methods.
+
+Example for Datetime:
+
+.. code-block:: python
+
+    from datetime import datetime
+    from typing import Any, Dict, Optional
+
+    from graphql import GraphQLScalarType, ValueNode
+    from graphql.utilities import value_from_ast_untyped
+
+
+    def serialize_datetime(value: Any) -> str:
+        return value.isoformat()
+
+
+    def parse_datetime_value(value: Any) -> datetime:
+        return datetime.fromisoformat(value)
+
+
+    def parse_datetime_literal(
+        value_node: ValueNode, variables: Optional[Dict[str, Any]] = None
+    ) -> datetime:
+        ast_value = value_from_ast_untyped(value_node, variables)
+        return parse_datetime_value(ast_value)
+
+
+    DatetimeScalar = GraphQLScalarType(
+        name="Datetime",
+        serialize=serialize_datetime,
+        parse_value=parse_datetime_value,
+        parse_literal=parse_datetime_literal,
+    )
+
+If you get your schema from a "schema.graphql" file or from introspection,
+then the generated schema in the gql Client will contain default :code:`GraphQLScalarType` instances
+where the serialize and parse_value methods simply return the serialized value without modification.
+
+In that case, if you want gql to parse custom scalars to a more useful Python representation,
+or to serialize custom scalar variables from a Python representation,
+then you can use the :func:`update_schema_scalars <gql.utilities.update_schema_scalars>`
+or :func:`update_schema_scalar <gql.utilities.update_schema_scalar>` methods
+to modify the definition of a scalar in your schema so that gql can do the parsing/serialization.
+
+.. code-block:: python
+
+    from gql.utilities import update_schema_scalar
+
+    with open('path/to/schema.graphql') as f:
+        schema_str = f.read()
+
+    client = Client(schema=schema_str, ...)
+
+    update_schema_scalar(client.schema, "Datetime", DatetimeScalar)
+
+    # or update_schema_scalars(client.schema, [DatetimeScalar])
+
+.. _enums:
+
+Enums
+-----
+
+GraphQL Enum types are a special kind of scalar that is restricted to a particular set of allowed values.
+
+For example, the schema may have a Color enum and contain::
+
+    enum Color {
+      RED
+      GREEN
+      BLUE
+    }
+
+Graphql-core provides the :code:`GraphQLEnumType` class to define an enum in the schema
+(See `graphql-core schema building docs`_).
+
+This class defines how the enum is serialized and parsed.
+
+If you get your schema from a "schema.graphql" file or from introspection,
+then the generated schema in the gql Client will contain default :code:`GraphQLEnumType` instances
+which should serialize/parse enums to/from their String representation (the :code:`RED` enum
+will be serialized to :code:`'RED'`).
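+
+For illustration, here is a minimal sketch of how such an enum type can be declared
+directly with graphql-core when building a schema by hand (the :code:`Color` names
+and values below are only an example):
+
+.. code-block:: python
+
+    from enum import Enum
+
+    from graphql import GraphQLEnumType
+
+    class Color(Enum):
+        RED = 0
+        GREEN = 1
+        BLUE = 2
+
+    # The dict maps the GraphQL enum names to the values
+    # that parse_value() will return (here: Color members)
+    ColorType = GraphQLEnumType("Color", {c.name: c for c in Color})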
+ +You may want to parse enums to convert them to Python Enum types. +In that case, you can use the :func:`update_schema_enum ` +to modify the default :code:`GraphQLEnumType` to use your defined Enum. + +Example: + +.. code-block:: python + + from enum import Enum + from gql.utilities import update_schema_enum + + class Color(Enum): + RED = 0 + GREEN = 1 + BLUE = 2 + + with open('path/to/schema.graphql') as f: + schema_str = f.read() + + client = Client(schema=schema_str, ...) + + update_schema_enum(client.schema, 'Color', Color) + +Serializing Inputs +------------------ + +To provide custom scalars and/or enums in inputs with gql, you can: + +- serialize the inputs manually +- let gql serialize the inputs using the custom scalars and enums defined in the schema + +Manually +^^^^^^^^ + +You can serialize inputs yourself: + + - in the query itself + - in variables + +This has the advantage that you don't need a schema... + +In the query +"""""""""""" + +- custom scalar: + +.. code-block:: python + + query = gql( + """{ + shiftDays(time: "2021-11-12T11:58:13.461161", days: 5) + }""" + ) + +- enum: + +.. code-block:: python + + query = gql("{opposite(color: RED)}") + +In a variable +""""""""""""" + +- custom scalar: + +.. code-block:: python + + query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") + + variable_values = { + "time": "2021-11-12T11:58:13.461161", + } + + result = client.execute(query, variable_values=variable_values) + +- enum: + +.. code-block:: python + + query = gql( + """ + query GetOppositeColor($color: Color) { + opposite(color:$color) + }""" + ) + + variable_values = { + "color": 'RED', + } + + result = client.execute(query, variable_values=variable_values) + +Automatically +^^^^^^^^^^^^^ + +If you have custom scalar and/or enums defined in your schema +(See: :ref:`custom_scalars` and :ref:`enums`), +then you can request gql to serialize your variables automatically. + +- use :code:`Client(..., serialize_variables=True)` to request serializing variables for all queries +- use :code:`execute(..., serialize_variables=True)` or :code:`subscribe(..., serialize_variables=True)` if + you want gql to serialize the variables only for a single query. + +Examples: + +- custom scalars: + +.. code-block:: python + + from gql.utilities import update_schema_scalars + + from .myscalars import DatetimeScalar + + async with Client(transport=transport, fetch_schema_from_transport=True) as session: + + # We update the schema we got from introspection with our custom scalar type + update_schema_scalars(session.client.schema, [DatetimeScalar]) + + # In the query, the custom scalar in the input is set to a variable + query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") + + # the argument for time is a datetime instance + variable_values = {"time": datetime.now()} + + # we execute the query with serialize_variables set to True + result = await session.execute( + query, variable_values=variable_values, serialize_variables=True + ) + +- enums: + +.. 
code-block:: python
+
+    from gql.utilities import update_schema_enum
+
+    from .myenums import Color
+
+    async with Client(transport=transport, fetch_schema_from_transport=True) as session:
+
+        # We update the schema we got from introspection with our custom enum
+        update_schema_enum(session.client.schema, 'Color', Color)
+
+        # In the query, the enum in the input is set to a variable
+        query = gql(
+            """
+            query GetOppositeColor($color: Color) {
+                opposite(color:$color)
+            }"""
+        )
+
+        # the argument for color is an instance of our Enum type
+        variable_values = {
+            "color": Color.RED,
+        }
+
+        # we execute the query with serialize_variables set to True
+        result = await session.execute(
+            query, variable_values=variable_values, serialize_variables=True
+        )
+
+Parsing output
+--------------
+
+By default, gql returns the serialized result from the backend without parsing
+(except JSON unserialization to Python default types).
+
+If you want to convert the result of custom scalars to custom objects,
+you can request gql to parse the results.
+
+- use :code:`Client(..., parse_results=True)` to request parsing for all queries
+- use :code:`execute(..., parse_result=True)` or :code:`subscribe(..., parse_result=True)` if
+  you want gql to parse only the result of a single query.
+
+Same example as above, with result parsing enabled:
+
+.. code-block:: python
+
+    from gql.utilities import update_schema_scalars
+
+    async with Client(transport=transport, fetch_schema_from_transport=True) as session:
+
+        update_schema_scalars(session.client.schema, [DatetimeScalar])
+
+        query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}")
+
+        variable_values = {"time": datetime.now()}
+
+        result = await session.execute(
+            query,
+            variable_values=variable_values,
+            serialize_variables=True,
+            parse_result=True,
+        )
+
+        # now result["shiftDays"] is a datetime instead of a string
+
+.. _graphql-core schema building docs: https://graphql-core-3.readthedocs.io/en/latest/usage/schema.html
diff --git a/docs/usage/index.rst b/docs/usage/index.rst
index 4a38093a..eebf9fd2 100644
--- a/docs/usage/index.rst
+++ b/docs/usage/index.rst
@@ -10,4 +10,4 @@ Usage
    variables
    headers
    file_upload
-   custom_scalars
+   custom_scalars_and_enums
diff --git a/gql/client.py b/gql/client.py
index 368193cc..079bb552 100644
--- a/gql/client.py
+++ b/gql/client.py
@@ -17,7 +17,8 @@
 from .transport.exceptions import TransportQueryError
 from .transport.local_schema import LocalSchemaTransport
 from .transport.transport import Transport
-from .variable_values import serialize_variable_values
+from .utilities import parse_result as parse_result_fn
+from .utilities import serialize_variable_values
 
 
 class Client:
@@ -48,6 +49,8 @@ def __init__(
         transport: Optional[Union[Transport, AsyncTransport]] = None,
         fetch_schema_from_transport: bool = False,
         execute_timeout: Optional[Union[int, float]] = 10,
+        serialize_variables: bool = False,
+        parse_results: bool = False,
     ):
         """Initialize the client with the given parameters.
 
@@ -59,6 +62,10 @@
         :param execute_timeout: The maximum time in seconds for the execution of a
             request before a TimeoutError is raised. Only used for async transports.
             Passing None results in waiting forever for a response.
+        :param serialize_variables: whether the variable values should be
+            serialized. Used for custom scalars and/or enums. Default: False.
+        :param parse_results: Whether gql will try to parse the serialized output
+            sent by the backend. Can be used to unserialize custom scalars or enums.
""" assert not ( type_def and introspection @@ -108,6 +115,9 @@ def __init__( # Enforced timeout of the execute function (only for async transports) self.execute_timeout = execute_timeout + self.serialize_variables = serialize_variables + self.parse_results = parse_results + def validate(self, document: DocumentNode): """:meta private:""" assert ( @@ -296,7 +306,8 @@ def _execute( *args, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, - serialize_variables: bool = False, + serialize_variables: Optional[bool] = None, + parse_result: Optional[bool] = None, **kwargs, ) -> ExecutionResult: """Execute the provided document AST synchronously using @@ -307,6 +318,8 @@ def _execute( :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. Default: False. + :param parse_result: Whether gql will unserialize the result. + By default use the parse_results attribute of the client. The extra arguments are passed to the transport execute method.""" @@ -315,15 +328,18 @@ def _execute( self.client.validate(document) # Parse variable values for custom scalars if requested - if serialize_variables and variable_values is not None: - variable_values = serialize_variable_values( - self.client.schema, - document, - variable_values, - operation_name=operation_name, - ) + if variable_values is not None: + if serialize_variables or ( + serialize_variables is None and self.client.serialize_variables + ): + variable_values = serialize_variable_values( + self.client.schema, + document, + variable_values, + operation_name=operation_name, + ) - return self.transport.execute( + result = self.transport.execute( document, *args, variable_values=variable_values, @@ -331,13 +347,26 @@ def _execute( **kwargs, ) + # Unserialize the result if requested + if self.client.schema: + if parse_result or (parse_result is None and self.client.parse_results): + result.data = parse_result_fn( + self.client.schema, + document, + result.data, + operation_name=operation_name, + ) + + return result + def execute( self, document: DocumentNode, *args, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, - serialize_variables: bool = False, + serialize_variables: Optional[bool] = None, + parse_result: Optional[bool] = None, **kwargs, ) -> Dict: """Execute the provided document AST synchronously using @@ -351,6 +380,8 @@ def execute( :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. Default: False. + :param parse_result: Whether gql will unserialize the result. + By default use the parse_results attribute of the client. 
The extra arguments are passed to the transport execute method.""" @@ -361,6 +392,7 @@ def execute( variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, + parse_result=parse_result, **kwargs, ) @@ -408,7 +440,8 @@ async def _subscribe( *args, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, - serialize_variables: bool = False, + serialize_variables: Optional[bool] = None, + parse_result: Optional[bool] = None, **kwargs, ) -> AsyncGenerator[ExecutionResult, None]: """Coroutine to subscribe asynchronously to the provided document AST @@ -423,6 +456,8 @@ async def _subscribe( :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. Default: False. + :param parse_result: Whether gql will unserialize the result. + By default use the parse_results attribute of the client. The extra arguments are passed to the transport subscribe method.""" @@ -431,13 +466,16 @@ async def _subscribe( self.client.validate(document) # Parse variable values for custom scalars if requested - if serialize_variables and variable_values is not None: - variable_values = serialize_variable_values( - self.client.schema, - document, - variable_values, - operation_name=operation_name, - ) + if variable_values is not None: + if serialize_variables or ( + serialize_variables is None and self.client.serialize_variables + ): + variable_values = serialize_variable_values( + self.client.schema, + document, + variable_values, + operation_name=operation_name, + ) # Subscribe to the transport inner_generator: AsyncGenerator[ @@ -456,7 +494,20 @@ async def _subscribe( try: async for result in inner_generator: + + if self.client.schema: + if parse_result or ( + parse_result is None and self.client.parse_results + ): + result.data = parse_result_fn( + self.client.schema, + document, + result.data, + operation_name=operation_name, + ) + yield result + finally: await inner_generator.aclose() @@ -466,7 +517,8 @@ async def subscribe( *args, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, - serialize_variables: bool = False, + serialize_variables: Optional[bool] = None, + parse_result: Optional[bool] = None, **kwargs, ) -> AsyncGenerator[Dict, None]: """Coroutine to subscribe asynchronously to the provided document AST @@ -480,6 +532,8 @@ async def subscribe( :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. Default: False. + :param parse_result: Whether gql will unserialize the result. + By default use the parse_results attribute of the client. 
The extra arguments are passed to the transport subscribe method.""" @@ -489,6 +543,7 @@ async def subscribe( variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, + parse_result=parse_result, **kwargs, ) @@ -513,7 +568,8 @@ async def _execute( *args, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, - serialize_variables: bool = False, + serialize_variables: Optional[bool] = None, + parse_result: Optional[bool] = None, **kwargs, ) -> ExecutionResult: """Coroutine to execute the provided document AST asynchronously using @@ -527,6 +583,8 @@ async def _execute( :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. Default: False. + :param parse_result: Whether gql will unserialize the result. + By default use the parse_results attribute of the client. The extra arguments are passed to the transport execute method.""" @@ -535,16 +593,19 @@ async def _execute( self.client.validate(document) # Parse variable values for custom scalars if requested - if serialize_variables and variable_values is not None: - variable_values = serialize_variable_values( - self.client.schema, - document, - variable_values, - operation_name=operation_name, - ) + if variable_values is not None: + if serialize_variables or ( + serialize_variables is None and self.client.serialize_variables + ): + variable_values = serialize_variable_values( + self.client.schema, + document, + variable_values, + operation_name=operation_name, + ) # Execute the query with the transport with a timeout - return await asyncio.wait_for( + result = await asyncio.wait_for( self.transport.execute( document, variable_values=variable_values, @@ -555,13 +616,26 @@ async def _execute( self.client.execute_timeout, ) + # Unserialize the result if requested + if self.client.schema: + if parse_result or (parse_result is None and self.client.parse_results): + result.data = parse_result_fn( + self.client.schema, + document, + result.data, + operation_name=operation_name, + ) + + return result + async def execute( self, document: DocumentNode, *args, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, - serialize_variables: bool = False, + serialize_variables: Optional[bool] = None, + parse_result: Optional[bool] = None, **kwargs, ) -> Dict: """Coroutine to execute the provided document AST asynchronously using @@ -575,6 +649,8 @@ async def execute( :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. Default: False. + :param parse_result: Whether gql will unserialize the result. + By default use the parse_results attribute of the client. 
The extra arguments are passed to the transport execute method.""" @@ -585,6 +661,7 @@ async def execute( variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, + parse_result=parse_result, **kwargs, ) diff --git a/gql/utilities/__init__.py b/gql/utilities/__init__.py index 68b80156..d17f9b2d 100644 --- a/gql/utilities/__init__.py +++ b/gql/utilities/__init__.py @@ -1,5 +1,13 @@ -from .update_schema_scalars import update_schema_scalars +from .parse_result import parse_result +from .serialize_variable_values import serialize_value, serialize_variable_values +from .update_schema_enum import update_schema_enum +from .update_schema_scalars import update_schema_scalar, update_schema_scalars __all__ = [ "update_schema_scalars", + "update_schema_scalar", + "update_schema_enum", + "parse_result", + "serialize_variable_values", + "serialize_value", ] diff --git a/gql/utilities/parse_result.py b/gql/utilities/parse_result.py new file mode 100644 index 00000000..ecb73474 --- /dev/null +++ b/gql/utilities/parse_result.py @@ -0,0 +1,446 @@ +import logging +from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union, cast + +from graphql import ( + IDLE, + REMOVE, + DocumentNode, + FieldNode, + FragmentDefinitionNode, + FragmentSpreadNode, + GraphQLError, + GraphQLInterfaceType, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLType, + InlineFragmentNode, + NameNode, + Node, + OperationDefinitionNode, + SelectionSetNode, + TypeInfo, + TypeInfoVisitor, + Visitor, + is_leaf_type, + print_ast, + visit, +) +from graphql.language.visitor import VisitorActionEnum +from graphql.pyutils import inspect + +log = logging.getLogger(__name__) + +# Equivalent to QUERY_DOCUMENT_KEYS but only for fields interesting to +# visit to parse the results +RESULT_DOCUMENT_KEYS: Dict[str, Tuple[str, ...]] = { + "document": ("definitions",), + "operation_definition": ("selection_set",), + "selection_set": ("selections",), + "field": ("selection_set",), + "inline_fragment": ("selection_set",), + "fragment_definition": ("selection_set",), +} + + +def _ignore_non_null(type_: GraphQLType): + """Removes the GraphQLNonNull wrappings around types.""" + if isinstance(type_, GraphQLNonNull): + return type_.of_type + else: + return type_ + + +def _get_fragment(document, fragment_name): + """Returns a fragment from the document.""" + for definition in document.definitions: + if isinstance(definition, FragmentDefinitionNode): + if definition.name.value == fragment_name: + return definition + + raise GraphQLError(f'Fragment "{fragment_name}" not found in document!') + + +class ParseResultVisitor(Visitor): + def __init__( + self, + schema: GraphQLSchema, + document: DocumentNode, + node: Node, + result: Dict[str, Any], + type_info: TypeInfo, + visit_fragment: bool = False, + inside_list_level: int = 0, + operation_name: Optional[str] = None, + ): + """Recursive Implementation of a Visitor class to parse results + correspondind to a schema and a document. + + Using a TypeInfo class to get the node types during traversal. + + If we reach a list in the results, then we parse each + item of the list recursively, traversing the same nodes + of the query again. + + During traversal, we keep the current position in the result + in the result_stack field. + + Alongside the field type, we calculate the "result type" + which is computed from the field type and the current + recursive level we are for this field + (:code:`inside_list_level` argument). 
+ """ + self.schema: GraphQLSchema = schema + self.document: DocumentNode = document + self.node: Node = node + self.result: Dict[str, Any] = result + self.type_info: TypeInfo = type_info + self.visit_fragment: bool = visit_fragment + self.inside_list_level = inside_list_level + self.operation_name = operation_name + + self.result_stack: List[Any] = [] + + @property + def current_result(self): + try: + return self.result_stack[-1] + except IndexError: + return self.result + + @staticmethod + def leave_document(node: DocumentNode, *_args: Any) -> Dict[str, Any]: + results = cast(List[Dict[str, Any]], node.definitions) + return {k: v for result in results for k, v in result.items()} + + def enter_operation_definition( + self, node: OperationDefinitionNode, *_args: Any + ) -> Union[None, VisitorActionEnum]: + + if self.operation_name is not None: + if not hasattr(node.name, "value"): + return REMOVE # pragma: no cover + + node.name = cast(NameNode, node.name) + + if node.name.value != self.operation_name: + log.debug(f"SKIPPING operation {node.name.value}") + return REMOVE + + return IDLE + + @staticmethod + def leave_operation_definition( + node: OperationDefinitionNode, *_args: Any + ) -> Dict[str, Any]: + selections = cast(List[Dict[str, Any]], node.selection_set) + return {k: v for s in selections for k, v in s.items()} + + @staticmethod + def leave_selection_set(node: SelectionSetNode, *_args: Any) -> Dict[str, Any]: + partial_results = cast(Dict[str, Any], node.selections) + return partial_results + + @staticmethod + def in_first_field(path): + return path.count("selections") <= 1 + + def get_current_result_type(self, path): + field_type = self.type_info.get_type() + + list_level = self.inside_list_level + + result_type = _ignore_non_null(field_type) + + if self.in_first_field(path): + + while list_level > 0: + assert isinstance(result_type, GraphQLList) + result_type = _ignore_non_null(result_type.of_type) + + list_level -= 1 + + return result_type + + def enter_field( + self, + node: FieldNode, + key: str, + parent: Node, + path: List[Node], + ancestors: List[Node], + ) -> Union[None, VisitorActionEnum, Dict[str, Any]]: + + name = node.alias.value if node.alias else node.name.value + + if log.isEnabledFor(logging.DEBUG): + log.debug(f"Enter field {name}") + log.debug(f" path={path!r}") + log.debug(f" current_result={self.current_result!r}") + + if self.current_result is None: + # Result was null for this field -> remove + return REMOVE + + elif isinstance(self.current_result, Mapping): + + try: + result_value = self.current_result[name] + except KeyError: + # Key not found in result. + # Should never happen in theory with a correct GraphQL backend + # Silently ignoring this field + log.debug(f"Key {name} not found in result --> REMOVE") + return REMOVE + + log.debug(f" result_value={result_value}") + + # We get the field_type from type_info + field_type = self.type_info.get_type() + + # We calculate a virtual "result type" depending on our recursion level. + result_type = self.get_current_result_type(path) + + # If the result for this field is a list, then we need + # to recursively visit the same node multiple times for each + # item in the list. 
+ if ( + not isinstance(result_value, Mapping) + and isinstance(result_value, Iterable) + and not isinstance(result_value, str) + and not is_leaf_type(result_type) + ): + + # Finding out the inner type of the list + inner_type = _ignore_non_null(result_type.of_type) + + if log.isEnabledFor(logging.DEBUG): + log.debug(" List detected:") + log.debug(f" field_type={inspect(field_type)}") + log.debug(f" result_type={inspect(result_type)}") + log.debug(f" inner_type={inspect(inner_type)}\n") + + visits: List[Dict[str, Any]] = [] + + # Get parent type + initial_type = self.type_info.get_parent_type() + assert isinstance( + initial_type, (GraphQLObjectType, GraphQLInterfaceType) + ) + + # Get parent SelectionSet node + new_node = ancestors[-1] + assert isinstance(new_node, SelectionSetNode) + + for item in result_value: + + new_result = {name: item} + + if log.isEnabledFor(logging.DEBUG): + log.debug(f" recursive new_result={new_result}") + log.debug(f" recursive ast={print_ast(node)}") + log.debug(f" recursive path={path!r}") + log.debug(f" recursive initial_type={initial_type!r}\n") + + if self.in_first_field(path): + inside_list_level = self.inside_list_level + 1 + else: + inside_list_level = 1 + + inner_visit = parse_result_recursive( + self.schema, + self.document, + new_node, + new_result, + initial_type=initial_type, + inside_list_level=inside_list_level, + ) + log.debug(f" recursive result={inner_visit}\n") + + inner_visit = cast(List[Dict[str, Any]], inner_visit) + visits.append(inner_visit[0][name]) + + result_value = {name: visits} + log.debug(f" recursive visits final result = {result_value}\n") + return result_value + + # If the result for this field is not a list, then add it + # to the result stack so that it becomes the current_value + # for the next inner fields + self.result_stack.append(result_value) + + return IDLE + + raise GraphQLError( + f"Invalid result for container of field {name}: {self.current_result!r}" + ) + + def leave_field( + self, + node: FieldNode, + key: str, + parent: Node, + path: List[Node], + ancestors: List[Node], + ) -> Dict[str, Any]: + + name = cast(str, node.alias.value if node.alias else node.name.value) + + log.debug(f"Leave field {name}") + + if self.current_result is None: + + log.debug(f"Leave field {name}: returning None") + return {name: None} + + elif node.selection_set is None: + + field_type = self.type_info.get_type() + result_type = self.get_current_result_type(path) + + if log.isEnabledFor(logging.DEBUG): + log.debug(f" field type of {name} is {inspect(field_type)}") + log.debug(f" result type of {name} is {inspect(result_type)}") + + assert is_leaf_type(result_type) + + # Finally parsing a single scalar using the parse_value method + parsed_value = result_type.parse_value(self.current_result) + + return_value = {name: parsed_value} + else: + + partial_results = cast(List[Dict[str, Any]], node.selection_set) + + return_value = { + name: {k: v for pr in partial_results for k, v in pr.items()} + } + + # Go up a level in the result stack + self.result_stack.pop() + + log.debug(f"Leave field {name}: returning {return_value}") + + return return_value + + # Fragments + + def enter_fragment_definition( + self, node: FragmentDefinitionNode, *_args: Any + ) -> Union[None, VisitorActionEnum]: + + if log.isEnabledFor(logging.DEBUG): + log.debug(f"Enter fragment definition {node.name.value}.") + log.debug(f"visit_fragment={self.visit_fragment!s}") + + if self.visit_fragment: + return IDLE + else: + return REMOVE + + @staticmethod + def 
leave_fragment_definition( + node: FragmentDefinitionNode, *_args: Any + ) -> Dict[str, Any]: + + selections = cast(List[Dict[str, Any]], node.selection_set) + return {k: v for s in selections for k, v in s.items()} + + def leave_fragment_spread( + self, node: FragmentSpreadNode, *_args: Any + ) -> Dict[str, Any]: + + fragment_name = node.name.value + + log.debug(f"Start recursive fragment visit {fragment_name}") + + fragment_node = _get_fragment(self.document, fragment_name) + + fragment_result = parse_result_recursive( + self.schema, + self.document, + fragment_node, + self.current_result, + visit_fragment=True, + ) + + log.debug( + f"Result of recursive fragment visit {fragment_name}: {fragment_result}" + ) + + return cast(Dict[str, Any], fragment_result) + + @staticmethod + def leave_inline_fragment(node: InlineFragmentNode, *_args: Any) -> Dict[str, Any]: + + selections = cast(List[Dict[str, Any]], node.selection_set) + return {k: v for s in selections for k, v in s.items()} + + +def parse_result_recursive( + schema: GraphQLSchema, + document: DocumentNode, + node: Node, + result: Optional[Dict[str, Any]], + initial_type: Optional[GraphQLType] = None, + inside_list_level: int = 0, + visit_fragment: bool = False, + operation_name: Optional[str] = None, +) -> Any: + + if result is None: + return None + + type_info = TypeInfo(schema, initial_type=initial_type) + + visited = visit( + node, + TypeInfoVisitor( + type_info, + ParseResultVisitor( + schema, + document, + node, + result, + type_info=type_info, + inside_list_level=inside_list_level, + visit_fragment=visit_fragment, + operation_name=operation_name, + ), + ), + visitor_keys=RESULT_DOCUMENT_KEYS, + ) + + return visited + + +def parse_result( + schema: GraphQLSchema, + document: DocumentNode, + result: Optional[Dict[str, Any]], + operation_name: Optional[str] = None, +) -> Optional[Dict[str, Any]]: + """Unserialize a result received from a GraphQL backend. + + :param schema: the GraphQL schema + :param document: the document representing the query sent to the backend + :param result: the serialized result received from the backend + :param operation_name: the optional operation name + + :returns: a parsed result with scalars and enums parsed depending on + their definition in the schema. + + Given a schema, a query and a serialized result, + provide a new result with parsed values. + + If the result contains only built-in GraphQL scalars (String, Int, Float, ...) + then the parsed result should be unchanged. + + If the result contains custom scalars or enums, then those values + will be parsed with the parse_value method of the custom scalar or enum + definition in the schema.""" + + return parse_result_recursive( + schema, document, document, result, operation_name=operation_name + ) diff --git a/gql/variable_values.py b/gql/utilities/serialize_variable_values.py similarity index 86% rename from gql/variable_values.py rename to gql/utilities/serialize_variable_values.py index 7db7091a..833df8bd 100644 --- a/gql/variable_values.py +++ b/gql/utilities/serialize_variable_values.py @@ -17,7 +17,7 @@ from graphql.pyutils import inspect -def get_document_operation( +def _get_document_operation( document: DocumentNode, operation_name: Optional[str] = None ) -> OperationDefinitionNode: """Returns the operation which should be executed in the document. @@ -53,7 +53,13 @@ def get_document_operation( def serialize_value(type_: GraphQLType, value: Any) -> Any: """Given a GraphQL type and a Python value, return the serialized value. 
+ This method will serialize the value recursively, entering into + lists and dicts. + Can be used to serialize Enums and/or Custom Scalars in variable values. + + :param type_: the GraphQL type + :param value: the provided value """ if value is None: @@ -93,13 +99,19 @@ def serialize_variable_values( """Given a GraphQL document and a schema, serialize the Dictionary of variable values. - Useful to serialize Enums and/or Custom Scalars in variable values + Useful to serialize Enums and/or Custom Scalars in variable values. + + :param schema: the GraphQL schema + :param document: the document representing the query sent to the backend + :param variable_values: the dictionnary of variable values which needs + to be serialized. + :param operation_name: the optional operation_name for the query. """ parsed_variable_values: Dict[str, Any] = {} # Find the operation in the document - operation = get_document_operation(document, operation_name=operation_name) + operation = _get_document_operation(document, operation_name=operation_name) # Serialize every variable value defined for the operation for var_def_node in operation.variable_definitions: diff --git a/gql/utilities/update_schema_enum.py b/gql/utilities/update_schema_enum.py new file mode 100644 index 00000000..80c73862 --- /dev/null +++ b/gql/utilities/update_schema_enum.py @@ -0,0 +1,69 @@ +from enum import Enum +from typing import Any, Dict, Mapping, Type, Union, cast + +from graphql import GraphQLEnumType, GraphQLSchema + + +def update_schema_enum( + schema: GraphQLSchema, + name: str, + values: Union[Dict[str, Any], Type[Enum]], + use_enum_values: bool = False, +): + """Update in the schema the GraphQLEnumType corresponding to the given name. + + Example:: + + from enum import Enum + + class Color(Enum): + RED = 0 + GREEN = 1 + BLUE = 2 + + update_schema_enum(schema, 'Color', Color) + + :param schema: a GraphQL Schema already containing the GraphQLEnumType type. + :param name: the name of the enum in the GraphQL schema + :param values: Either a Python Enum or a dict of values. The keys of the provided + values should correspond to the keys of the existing enum in the schema. + :param use_enum_values: By default, we configure the GraphQLEnumType to serialize + to enum instances (ie: .parse_value() returns Color.RED). + If use_enum_values is set to True, then .parse_value() returns 0. + use_enum_values=True is the defaut behaviour when passing an Enum + to a GraphQLEnumType. 
+ """ + + # Convert Enum values to Dict + if isinstance(values, type): + if issubclass(values, Enum): + values = cast(Type[Enum], values) + if use_enum_values: + values = {enum.name: enum.value for enum in values} + else: + values = {enum.name: enum for enum in values} + + if not isinstance(values, Mapping): + raise TypeError(f"Invalid type for enum values: {type(values)}") + + # Find enum type in schema + schema_enum = schema.get_type(name) + + if schema_enum is None: + raise KeyError(f"Enum {name} not found in schema!") + + if not isinstance(schema_enum, GraphQLEnumType): + raise TypeError( + f'The type "{name}" is not a GraphQLEnumType, it is a {type(schema_enum)}' + ) + + # Replace all enum values + for enum_name, enum_value in schema_enum.values.items(): + try: + enum_value.value = values[enum_name] + except KeyError: + raise KeyError(f'Enum key "{enum_name}" not found in provided values!') + + # Delete the _value_lookup cached property + if "_value_lookup" in schema_enum.__dict__: + del schema_enum.__dict__["_value_lookup"] diff --git a/gql/utilities/update_schema_scalars.py b/gql/utilities/update_schema_scalars.py index d5434c6b..db3adb17 100644 --- a/gql/utilities/update_schema_scalars.py +++ b/gql/utilities/update_schema_scalars.py @@ -1,32 +1,60 @@ from typing import Iterable, List -from graphql import GraphQLError, GraphQLScalarType, GraphQLSchema +from graphql import GraphQLScalarType, GraphQLSchema + + +def update_schema_scalar(schema: GraphQLSchema, name: str, scalar: GraphQLScalarType): + """Update the scalar in a schema with the scalar provided. + + :param schema: the GraphQL schema + :param name: the name of the custom scalar type in the schema + :param scalar: a provided scalar type + + This can be used to update the default Custom Scalar implementation + when the schema has been provided from a text file or from introspection. + """ + + if not isinstance(scalar, GraphQLScalarType): + raise TypeError("Scalars should be instances of GraphQLScalarType.") + + schema_scalar = schema.get_type(name) + + if schema_scalar is None: + raise KeyError(f"Scalar '{name}' not found in schema.") + + if not isinstance(schema_scalar, GraphQLScalarType): + raise TypeError( + f'The type "{name}" is not a GraphQLScalarType,' + f" it is a {type(schema_scalar)}" + ) + + # Update the conversion methods + # Using setattr because mypy has a false positive + # https://github.com/python/mypy/issues/2427 + setattr(schema_scalar, "serialize", scalar.serialize) + setattr(schema_scalar, "parse_value", scalar.parse_value) + setattr(schema_scalar, "parse_literal", scalar.parse_literal) def update_schema_scalars(schema: GraphQLSchema, scalars: List[GraphQLScalarType]): """Update the scalars in a schema with the scalars provided. + :param schema: the GraphQL schema + :param scalars: a list of provided scalar types + This can be used to update the default Custom Scalar implementation when the schema has been provided from a text file or from introspection. + + If the name of the provided scalar is different than the name of + the custom scalar, then you should use the + :func:`update_schema_scalar ` method instead. 
""" if not isinstance(scalars, Iterable): - raise GraphQLError("Scalars argument should be a list of scalars.") + raise TypeError("Scalars argument should be a list of scalars.") for scalar in scalars: if not isinstance(scalar, GraphQLScalarType): - raise GraphQLError("Scalars should be instances of GraphQLScalarType.") - - try: - schema_scalar = schema.type_map[scalar.name] - except KeyError: - raise GraphQLError(f"Scalar '{scalar.name}' not found in schema.") - - assert isinstance(schema_scalar, GraphQLScalarType) + raise TypeError("Scalars should be instances of GraphQLScalarType.") - # Update the conversion methods - # Using setattr because mypy has a false positive - # https://github.com/python/mypy/issues/2427 - setattr(schema_scalar, "serialize", scalar.serialize) - setattr(schema_scalar, "parse_value", scalar.parse_value) - setattr(schema_scalar, "parse_literal", scalar.parse_literal) + update_schema_scalar(schema, scalar.name, scalar) diff --git a/tests/conftest.py b/tests/conftest.py index 004fa9df..519738cc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -105,6 +105,7 @@ async def go(app, *, port=None, **kwargs): # type: ignore "gql.transport.websockets", "gql.transport.phoenix_channel_websockets", "gql.dsl", + "gql.utilities.parse_result", ]: logger = logging.getLogger(name) logger.setLevel(logging.DEBUG) diff --git a/tests/custom_scalars/test_custom_scalar_datetime.py b/tests/custom_scalars/test_datetime.py similarity index 89% rename from tests/custom_scalars/test_custom_scalar_datetime.py rename to tests/custom_scalars/test_datetime.py index 25c6bb31..169ce076 100644 --- a/tests/custom_scalars/test_custom_scalar_datetime.py +++ b/tests/custom_scalars/test_datetime.py @@ -112,7 +112,7 @@ def resolve_seconds(root, _info, interval): ) def test_shift_days(): - client = Client(schema=schema) + client = Client(schema=schema, parse_results=True, serialize_variables=True) now = datetime.fromisoformat("2021-11-12T11:58:13.461161") @@ -122,13 +122,11 @@ def test_shift_days(): "time": now, } - result = client.execute( - query, variable_values=variable_values, serialize_variables=True - ) + result = client.execute(query, variable_values=variable_values) print(result) - assert result["shiftDays"] == "2021-11-17T11:58:13.461161" + assert result["shiftDays"] == datetime.fromisoformat("2021-11-17T11:58:13.461161") @pytest.mark.skipif( @@ -144,11 +142,11 @@ def test_shift_days_serialized_manually_in_query(): }""" ) - result = client.execute(query) + result = client.execute(query, parse_result=True) print(result) - assert result["shiftDays"] == "2021-11-17T11:58:13.461161" + assert result["shiftDays"] == datetime.fromisoformat("2021-11-17T11:58:13.461161") @pytest.mark.skipif( @@ -156,7 +154,7 @@ def test_shift_days_serialized_manually_in_query(): ) def test_shift_days_serialized_manually_in_variables(): - client = Client(schema=schema) + client = Client(schema=schema, parse_results=True) query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") @@ -168,7 +166,7 @@ def test_shift_days_serialized_manually_in_variables(): print(result) - assert result["shiftDays"] == "2021-11-17T11:58:13.461161" + assert result["shiftDays"] == datetime.fromisoformat("2021-11-17T11:58:13.461161") @pytest.mark.skipif( @@ -176,7 +174,7 @@ def test_shift_days_serialized_manually_in_variables(): ) def test_latest(): - client = Client(schema=schema) + client = Client(schema=schema, parse_results=True) now = datetime.fromisoformat("2021-11-12T11:58:13.461161") in_five_days = 
datetime.fromisoformat("2021-11-17T11:58:13.461161") @@ -193,7 +191,7 @@ def test_latest(): print(result) - assert result["latest"] == in_five_days.isoformat() + assert result["latest"] == in_five_days @pytest.mark.skipif( diff --git a/tests/custom_scalars/test_enum_colors.py b/tests/custom_scalars/test_enum_colors.py new file mode 100644 index 00000000..2c7b887c --- /dev/null +++ b/tests/custom_scalars/test_enum_colors.py @@ -0,0 +1,325 @@ +from enum import Enum + +import pytest +from graphql import ( + GraphQLArgument, + GraphQLEnumType, + GraphQLField, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, +) + +from gql import Client, gql +from gql.utilities import update_schema_enum + + +class Color(Enum): + RED = 0 + GREEN = 1 + BLUE = 2 + YELLOW = 3 + CYAN = 4 + MAGENTA = 5 + + +RED = Color.RED +GREEN = Color.GREEN +BLUE = Color.BLUE +YELLOW = Color.YELLOW +CYAN = Color.CYAN +MAGENTA = Color.MAGENTA + +ALL_COLORS = [c for c in Color] + +ColorType = GraphQLEnumType("Color", {c.name: c for c in Color}) + + +def resolve_opposite(_root, _info, color): + opposite_colors = { + RED: CYAN, + GREEN: MAGENTA, + BLUE: YELLOW, + YELLOW: BLUE, + CYAN: RED, + MAGENTA: GREEN, + } + + return opposite_colors[color] + + +def resolve_all(_root, _info): + return ALL_COLORS + + +list_of_list_of_list = [[[RED, GREEN], [GREEN, BLUE]], [[YELLOW, CYAN], [MAGENTA, RED]]] + + +def resolve_list_of_list_of_list(_root, _info): + return list_of_list_of_list + + +def resolve_list_of_list(_root, _info): + return list_of_list_of_list[0] + + +def resolve_list(_root, _info): + return list_of_list_of_list[0][0] + + +queryType = GraphQLObjectType( + name="RootQueryType", + fields={ + "all": GraphQLField(GraphQLList(ColorType), resolve=resolve_all,), + "opposite": GraphQLField( + ColorType, + args={"color": GraphQLArgument(ColorType)}, + resolve=resolve_opposite, + ), + "list_of_list_of_list": GraphQLField( + GraphQLNonNull( + GraphQLList( + GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLList(ColorType)))) + ) + ), + resolve=resolve_list_of_list_of_list, + ), + "list_of_list": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLList(ColorType)))), + resolve=resolve_list_of_list, + ), + "list": GraphQLField( + GraphQLNonNull(GraphQLList(ColorType)), resolve=resolve_list, + ), + }, +) + +schema = GraphQLSchema(query=queryType) + + +def test_parse_value_enum(): + + result = ColorType.parse_value("RED") + + print(result) + + assert isinstance(result, Color) + assert result is RED + + +def test_serialize_enum(): + + result = ColorType.serialize(RED) + + print(result) + + assert result == "RED" + + +def test_get_all_colors(): + + query = gql("{all}") + + client = Client(schema=schema, parse_results=True) + + result = client.execute(query) + + print(result) + + all_colors = result["all"] + + assert all_colors == ALL_COLORS + + +def test_opposite_color_literal(): + + client = Client(schema=schema, parse_results=True) + + query = gql("{opposite(color: RED)}") + + result = client.execute(query) + + print(result) + + opposite_color = result["opposite"] + + assert isinstance(opposite_color, Color) + assert opposite_color == CYAN + + +def test_opposite_color_variable_serialized_manually(): + + client = Client(schema=schema, parse_results=True) + + query = gql( + """ + query GetOppositeColor($color: Color) { + opposite(color:$color) + }""" + ) + + variable_values = { + "color": "RED", + } + + result = client.execute(query, variable_values=variable_values) + + print(result) + + opposite_color = 
result["opposite"] + + assert isinstance(opposite_color, Color) + assert opposite_color == CYAN + + +def test_opposite_color_variable_serialized_by_gql(): + + client = Client(schema=schema, parse_results=True) + + query = gql( + """ + query GetOppositeColor($color: Color) { + opposite(color:$color) + }""" + ) + + variable_values = { + "color": RED, + } + + result = client.execute( + query, variable_values=variable_values, serialize_variables=True + ) + + print(result) + + opposite_color = result["opposite"] + + assert isinstance(opposite_color, Color) + assert opposite_color == CYAN + + +def test_list(): + + query = gql("{list}") + + client = Client(schema=schema, parse_results=True) + + result = client.execute(query) + + print(result) + + big_list = result["list"] + + assert big_list == list_of_list_of_list[0][0] + + +def test_list_of_list(): + + query = gql("{list_of_list}") + + client = Client(schema=schema, parse_results=True) + + result = client.execute(query) + + print(result) + + big_list = result["list_of_list"] + + assert big_list == list_of_list_of_list[0] + + +def test_list_of_list_of_list(): + + query = gql("{list_of_list_of_list}") + + client = Client(schema=schema, parse_results=True) + + result = client.execute(query) + + print(result) + + big_list = result["list_of_list_of_list"] + + assert big_list == list_of_list_of_list + + +def test_update_schema_enum(): + + assert schema.get_type("Color").parse_value("RED") == Color.RED + + # Using values + + update_schema_enum(schema, "Color", Color, use_enum_values=True) + + assert schema.get_type("Color").parse_value("RED") == 0 + assert schema.get_type("Color").serialize(1) == "GREEN" + + update_schema_enum(schema, "Color", Color) + + assert schema.get_type("Color").parse_value("RED") == Color.RED + assert schema.get_type("Color").serialize(Color.RED) == "RED" + + +def test_update_schema_enum_errors(): + + with pytest.raises(KeyError) as exc_info: + update_schema_enum(schema, "Corlo", Color) + + assert "Enum Corlo not found in schema!" in str(exc_info) + + with pytest.raises(TypeError) as exc_info: + update_schema_enum(schema, "Color", 6) + + assert "Invalid type for enum values: " in str(exc_info) + + with pytest.raises(TypeError) as exc_info: + update_schema_enum(schema, "RootQueryType", Color) + + assert 'The type "RootQueryType" is not a GraphQLEnumType, it is a' in str(exc_info) + + with pytest.raises(KeyError) as exc_info: + update_schema_enum(schema, "Color", {"RED": Color.RED}) + + assert 'Enum key "GREEN" not found in provided values!' 
in str(exc_info) + + +def test_parse_results_with_operation_type(): + + client = Client(schema=schema, parse_results=True) + + query = gql( + """ + query GetAll { + all + } + query GetOppositeColor($color: Color) { + opposite(color:$color) + } + query GetOppositeColor2($color: Color) { + other_opposite:opposite(color:$color) + } + query GetOppositeColor3 { + opposite(color: YELLOW) + } + query GetListOfListOfList { + list_of_list_of_list + } + """ + ) + + variable_values = { + "color": "RED", + } + + result = client.execute( + query, variable_values=variable_values, operation_name="GetOppositeColor" + ) + + print(result) + + opposite_color = result["opposite"] + + assert isinstance(opposite_color, Color) + assert opposite_color == CYAN diff --git a/tests/custom_scalars/test_custom_scalar_json.py b/tests/custom_scalars/test_json.py similarity index 98% rename from tests/custom_scalars/test_custom_scalar_json.py rename to tests/custom_scalars/test_json.py index 80f99850..9659d0a5 100644 --- a/tests/custom_scalars/test_custom_scalar_json.py +++ b/tests/custom_scalars/test_json.py @@ -94,7 +94,7 @@ def resolve_add_player(root, _info, player): def test_json_value_output(): - client = Client(schema=schema) + client = Client(schema=schema, parse_results=True) query = gql("query {players}") diff --git a/tests/custom_scalars/test_custom_scalar_money.py b/tests/custom_scalars/test_money.py similarity index 80% rename from tests/custom_scalars/test_custom_scalar_money.py rename to tests/custom_scalars/test_money.py index 238308a9..1b65ec98 100644 --- a/tests/custom_scalars/test_custom_scalar_money.py +++ b/tests/custom_scalars/test_money.py @@ -11,6 +11,7 @@ GraphQLField, GraphQLFloat, GraphQLInt, + GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, @@ -20,8 +21,7 @@ from gql import Client, gql from gql.transport.exceptions import TransportQueryError -from gql.utilities import update_schema_scalars -from gql.variable_values import serialize_value +from gql.utilities import serialize_value, update_schema_scalar, update_schema_scalars from ..conftest import MS @@ -82,9 +82,34 @@ def parse_money_literal( parse_literal=parse_money_literal, ) +root_value = { + "balance": Money(42, "DM"), + "friends_balance": [Money(12, "EUR"), Money(24, "EUR"), Money(150, "DM")], + "countries_balance": { + "Belgium": Money(15000, "EUR"), + "Luxembourg": Money(99999, "EUR"), + }, +} + def resolve_balance(root, _info): - return root + return root["balance"] + + +def resolve_friends_balance(root, _info): + return root["friends_balance"] + + +def resolve_countries_balance(root, _info): + return root["countries_balance"] + + +def resolve_belgium_balance(countries_balance, _info): + return countries_balance["Belgium"] + + +def resolve_luxembourg_balance(countries_balance, _info): + return countries_balance["Luxembourg"] def resolve_to_euros(_root, _info, money): @@ -97,6 +122,18 @@ def resolve_to_euros(_root, _info, money): raise ValueError("Cannot convert to euros: " + inspect(money)) +countriesBalance = GraphQLObjectType( + name="CountriesBalance", + fields={ + "Belgium": GraphQLField( + GraphQLNonNull(MoneyScalar), resolve=resolve_belgium_balance + ), + "Luxembourg": GraphQLField( + GraphQLNonNull(MoneyScalar), resolve=resolve_luxembourg_balance + ), + }, +) + queryType = GraphQLObjectType( name="RootQueryType", fields={ @@ -106,6 +143,12 @@ def resolve_to_euros(_root, _info, money): args={"money": GraphQLArgument(MoneyScalar)}, resolve=resolve_to_euros, ), + "friends_balance": GraphQLField( + 
GraphQLList(MoneyScalar), resolve=resolve_friends_balance + ), + "countries_balance": GraphQLField( + GraphQLNonNull(countriesBalance), resolve=resolve_countries_balance, + ), }, ) @@ -133,14 +176,12 @@ async def subscribe_spend_all(_root, _info, money): }, ) -root_value = Money(42, "DM") - schema = GraphQLSchema(query=queryType, subscription=subscriptionType,) def test_custom_scalar_in_output(): - client = Client(schema=schema) + client = Client(schema=schema, parse_results=True) query = gql("{balance}") @@ -148,7 +189,53 @@ def test_custom_scalar_in_output(): print(result) - assert result["balance"] == serialize_money(root_value) + assert result["balance"] == root_value["balance"] + + +def test_custom_scalar_in_output_embedded_fragments(): + + client = Client(schema=schema, parse_results=True) + + query = gql( + """ + fragment LuxMoneyInternal on CountriesBalance { + ... on CountriesBalance { + Luxembourg + } + } + query { + countries_balance { + Belgium + ...LuxMoney + } + } + fragment LuxMoney on CountriesBalance { + ...LuxMoneyInternal + } + """ + ) + + result = client.execute(query, root_value=root_value) + + print(result) + + belgium_money = result["countries_balance"]["Belgium"] + assert belgium_money == Money(15000, "EUR") + luxembourg_money = result["countries_balance"]["Luxembourg"] + assert luxembourg_money == Money(99999, "EUR") + + +def test_custom_scalar_list_in_output(): + + client = Client(schema=schema, parse_results=True) + + query = gql("{friends_balance}") + + result = client.execute(query, root_value=root_value) + + print(result) + + assert result["friends_balance"] == root_value["friends_balance"] def test_custom_scalar_in_input_query(): @@ -301,16 +388,18 @@ def test_custom_scalar_subscribe_in_input_variable_values_serialized(): variable_values = {"money": money_value} - expected_result = {"spend": {"amount": 10, "currency": "DM"}} + expected_result = {"spend": Money(10, "DM")} for result in client.subscribe( query, variable_values=variable_values, root_value=root_value, serialize_variables=True, + parse_result=True, ): print(f"result = {result!r}") - expected_result["spend"]["amount"] = expected_result["spend"]["amount"] - 1 + assert isinstance(result["spend"], Money) + expected_result["spend"] = Money(expected_result["spend"].amount - 1, "DM") assert expected_result == result @@ -385,7 +474,7 @@ async def test_custom_scalar_in_output_with_transport(event_loop, aiohttp_server print(result) - assert result["balance"] == serialize_money(root_value) + assert result["balance"] == serialize_money(root_value["balance"]) @pytest.mark.asyncio @@ -533,7 +622,8 @@ async def test_update_schema_scalars(event_loop, aiohttp_server): # Update the schema MoneyScalar default implementation from # introspection with our provided conversion methods - update_schema_scalars(session.client.schema, [MoneyScalar]) + # update_schema_scalars(session.client.schema, [MoneyScalar]) + update_schema_scalar(session.client.schema, "Money", MoneyScalar) query = gql("query myquery($money: Money) {toEuros(money: $money)}") @@ -549,17 +639,24 @@ async def test_update_schema_scalars(event_loop, aiohttp_server): def test_update_schema_scalars_invalid_scalar(): - with pytest.raises(GraphQLError) as exc_info: + with pytest.raises(TypeError) as exc_info: update_schema_scalars(schema, [int]) exception = exc_info.value assert str(exception) == "Scalars should be instances of GraphQLScalarType." 
+ with pytest.raises(TypeError) as exc_info: + update_schema_scalar(schema, "test", int) + + exception = exc_info.value + + assert str(exception) == "Scalars should be instances of GraphQLScalarType." + def test_update_schema_scalars_invalid_scalar_argument(): - with pytest.raises(GraphQLError) as exc_info: + with pytest.raises(TypeError) as exc_info: update_schema_scalars(schema, MoneyScalar) exception = exc_info.value @@ -571,12 +668,24 @@ def test_update_schema_scalars_scalar_not_found_in_schema(): NotFoundScalar = GraphQLScalarType(name="abcd",) - with pytest.raises(GraphQLError) as exc_info: + with pytest.raises(KeyError) as exc_info: update_schema_scalars(schema, [MoneyScalar, NotFoundScalar]) exception = exc_info.value - assert str(exception) == "Scalar 'abcd' not found in schema." + assert "Scalar 'abcd' not found in schema." in str(exception) + + +def test_update_schema_scalars_scalar_type_is_not_a_scalar_in_schema(): + + with pytest.raises(TypeError) as exc_info: + update_schema_scalar(schema, "CountriesBalance", MoneyScalar) + + exception = exc_info.value + + assert 'The type "CountriesBalance" is not a GraphQLScalarType, it is a' in str( + exception + ) @pytest.mark.asyncio @@ -588,7 +697,7 @@ async def test_custom_scalar_serialize_variables_sync_transport( server, transport = await make_sync_money_transport(aiohttp_server) def test_code(): - with Client(schema=schema, transport=transport,) as session: + with Client(schema=schema, transport=transport, parse_results=True) as session: query = gql("query myquery($money: Money) {toEuros(money: $money)}") diff --git a/tests/starwars/test_parse_results.py b/tests/starwars/test_parse_results.py new file mode 100644 index 00000000..23073839 --- /dev/null +++ b/tests/starwars/test_parse_results.py @@ -0,0 +1,191 @@ +import pytest +from graphql import GraphQLError + +from gql import gql +from gql.utilities import parse_result +from tests.starwars.schema import StarWarsSchema + + +def test_hero_name_and_friends_query(): + query = gql( + """ + query HeroNameAndFriendsQuery { + hero { + id + friends { + name + } + name + } + } + """ + ) + result = { + "hero": { + "id": "2001", + "friends": [ + {"name": "Luke Skywalker"}, + {"name": "Han Solo"}, + {"name": "Leia Organa"}, + ], + "name": "R2-D2", + } + } + + parsed_result = parse_result(StarWarsSchema, query, result) + + assert result == parsed_result + + +def test_key_not_found_in_result(): + + query = gql( + """ + { + hero { + id + } + } + """ + ) + + # Backend returned an invalid result without the hero key + # Should be impossible. 
In that case, we ignore the missing key + result = {} + + parsed_result = parse_result(StarWarsSchema, query, result) + + assert result == parsed_result + + +def test_invalid_result_raise_error(): + + query = gql( + """ + { + hero { + id + } + } + """ + ) + + result = {"hero": 5} + + with pytest.raises(GraphQLError) as exc_info: + + parse_result(StarWarsSchema, query, result) + + assert "Invalid result for container of field id: 5" in str(exc_info) + + +def test_fragment(): + + query = gql( + """ + query UseFragment { + luke: human(id: "1000") { + ...HumanFragment + } + leia: human(id: "1003") { + ...HumanFragment + } + } + fragment HumanFragment on Human { + name + homePlanet + } + """ + ) + + result = { + "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, + "leia": {"name": "Leia Organa", "homePlanet": "Alderaan"}, + } + + parsed_result = parse_result(StarWarsSchema, query, result) + + assert result == parsed_result + + +def test_fragment_not_found(): + + query = gql( + """ + query UseFragment { + luke: human(id: "1000") { + ...HumanFragment + } + } + """ + ) + + result = { + "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, + } + + with pytest.raises(GraphQLError) as exc_info: + + parse_result(StarWarsSchema, query, result) + + assert 'Fragment "HumanFragment" not found in document!' in str(exc_info) + + +def test_return_none_if_result_is_none(): + + query = gql( + """ + query { + hero { + id + } + } + """ + ) + + result = None + + assert parse_result(StarWarsSchema, query, result) is None + + +def test_null_result_is_allowed(): + + query = gql( + """ + query { + hero { + id + } + } + """ + ) + + result = {"hero": None} + + parsed_result = parse_result(StarWarsSchema, query, result) + + assert result == parsed_result + + +def test_inline_fragment(): + + query = gql( + """ + query UseFragment { + luke: human(id: "1000") { + ... on Human { + name + homePlanet + } + } + } + """ + ) + + result = { + "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, + } + + parsed_result = parse_result(StarWarsSchema, query, result) + + assert result == parsed_result diff --git a/tests/starwars/test_query.py b/tests/starwars/test_query.py index 62890222..520018c1 100644 --- a/tests/starwars/test_query.py +++ b/tests/starwars/test_query.py @@ -107,7 +107,7 @@ def test_nested_query(client): ], } } - result = client.execute(query) + result = client.execute(query, parse_result=False) assert result == expected diff --git a/tests/starwars/test_subscription.py b/tests/starwars/test_subscription.py index 3753ab2f..2516701f 100644 --- a/tests/starwars/test_subscription.py +++ b/tests/starwars/test_subscription.py @@ -53,7 +53,9 @@ async def test_subscription_support_using_client(): async with Client(schema=StarWarsSchema) as session: results = [ result["reviewAdded"] - async for result in session.subscribe(subs, variable_values=params) + async for result in session.subscribe( + subs, variable_values=params, parse_result=False + ) ] assert results == expected diff --git a/tests/test_async_client_validation.py b/tests/test_async_client_validation.py index 1402aa59..107bd6c2 100644 --- a/tests/test_async_client_validation.py +++ b/tests/test_async_client_validation.py @@ -112,7 +112,7 @@ async def test_async_client_validation( expected = [] async for result in session.subscribe( - subscription, variable_values=variable_values + subscription, variable_values=variable_values, parse_result=False ): review = result["reviewAdded"]
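
The tests above exercise the new result-parsing and schema-update helpers from the client side. As a quick orientation, here is a minimal, self-contained sketch of the pattern they cover: mapping a schema enum to a Python ``Enum`` with ``update_schema_enum``, letting gql serialize variables with ``serialize_variables=True``, and getting Python objects back with ``parse_results=True``. The ``Color`` schema below is invented for illustration, and the sketch assumes ``update_schema_enum`` is importable from ``gql.utilities`` like the other helpers touched in this changeset.

.. code-block:: python

    from enum import Enum

    from graphql import (
        GraphQLArgument,
        GraphQLEnumType,
        GraphQLField,
        GraphQLObjectType,
        GraphQLSchema,
    )

    from gql import Client, gql
    from gql.utilities import update_schema_enum  # assumed export, as with update_schema_scalar


    class Color(Enum):
        RED = 0
        GREEN = 1
        BLUE = 2


    def resolve_opposite(_root, _info, color):
        # After update_schema_enum below, the argument is already a Color member.
        return {Color.RED: Color.GREEN, Color.GREEN: Color.BLUE, Color.BLUE: Color.RED}[color]


    # Schema enum initially holds plain string values ("RED", "GREEN", "BLUE").
    ColorType = GraphQLEnumType("Color", {c.name: c.name for c in Color})

    queryType = GraphQLObjectType(
        name="RootQueryType",
        fields={
            "opposite": GraphQLField(
                ColorType,
                args={"color": GraphQLArgument(ColorType)},
                resolve=resolve_opposite,
            ),
        },
    )

    schema = GraphQLSchema(query=queryType)

    # Replace the default enum values in the schema with the Python Enum members,
    # so inputs and parsed outputs are converted automatically.
    update_schema_enum(schema, "Color", Color)

    client = Client(schema=schema, parse_results=True)

    query = gql("query GetOpposite($color: Color) { opposite(color: $color) }")

    result = client.execute(
        query,
        variable_values={"color": Color.RED},
        serialize_variables=True,
    )

    assert isinstance(result["opposite"], Color)
    assert result["opposite"] == Color.GREEN

The same pattern applies to custom scalars via ``update_schema_scalar`` / ``update_schema_scalars``, and ``gql.utilities.parse_result`` can be called directly on a raw result dict when parsing is wanted outside of ``Client.execute``.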