diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c6ef18b..6d61011 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -29,6 +29,6 @@ jobs: pytest - name: Linters run: | - ruff ariadne_graphql_proxy tests + ruff check ariadne_graphql_proxy tests mypy ariadne_graphql_proxy --ignore-missing-imports --check-untyped-defs black --check ariadne_graphql_proxy tests diff --git a/CHANGELOG.md b/CHANGELOG.md index 7d37cfa..3ef0ab1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,11 @@ # CHANGELOG -## UNRELEASED +## 0.3.0 (UNRELEASED) - Added `CacheSerializer`, `NoopCacheSerializer` and `JSONCacheSerializer`. Changed `CacheBackend`, `InMemoryCache`, `CloudflareCacheBackend` and `DynamoDBCacheBackend` to accept `serializer` initialization option. - Fixed schema proxy returning an error when variable defined in an operation is missing from its variables. - Improved custom headers handling in `ProxyResolver` and `ProxySchema`. +- Added fields dependencies configuration option to `ProxySchema`. ## 0.2.0 (2023-09-25) diff --git a/GUIDE.md b/GUIDE.md index 7849585..b0aaab1 100644 --- a/GUIDE.md +++ b/GUIDE.md @@ -572,6 +572,71 @@ If `proxy_headers` is a callable, it will be called with single argument (`conte If `proxy_headers` is `None` or `False`, no headers are proxied to the other service. +## Fields dependencies + +In situations where field depends on data from sibling fields in order to be resolved, `ProxySchema` can be configured to include those additional fields in root value query sent to remote schema. + +Below example pulls a remote schema that defines `Product` type, extends this type with `image: String` field, and then uses `ProxySchema.add_field_dependencies` to configure `{ metadata { thumb} }` as additional fields to retrieve when `image` field is queried. 
It also includes a custom resolver for the `image` field that uses this additional data: + + +```python +from ariadne.asgi import GraphQL +from ariadne_graphql_proxy import ( + ProxySchema, + get_context_value, + set_resolver, +) +from graphql import build_ast_schema, parse + + +proxy_schema = ProxySchema() + +# Store schema ID for remote schema +remote_schema_id = proxy_schema.add_remote_schema( + "https://example.com/graphql/", +) + +# Extend Product type with additional image field +proxy_schema.add_schema( + build_ast_schema( + parse( + """ + type Product { + image: String + } + """ + ) + ) +) + +# Configure proxy schema to retrieve thumb from metadata +# from remote schema when image is queried +proxy_schema.add_field_dependencies( + remote_schema_id, "Product", "image", "{ metadata { thumb } }" +) + +# Create schema instance +final_schema = proxy_schema.get_final_schema() + + +# Add product image resolver +def resolve_product_image(obj, info): + return obj["metadata"]["thumb"] + + +set_resolver(final_schema, "Product", "image", resolve_product_image) + + +# Setup Ariadne ASGI GraphQL application +app = GraphQL( + final_schema, + context_value=get_context_value, + root_value=proxy_schema.root_resolver, + debug=True, +) +``` + + ## Cache framework Ariadne GraphQL Proxy implements basic cache framework that enables of caching parts of GraphQL queries. @@ -855,6 +920,34 @@ def add_delayed_fields(self, delayed_fields: Dict[str, List[str]]): Sets specific fields in schema as delayed. Delayed fields are excluded from queries ran by `root_resolver` against the remote GraphQL APIs. 
+#### `delayed_fields` + +This is a dict mapping type names to lists of field names: + +```python +{"Type": ["field", "otherField"], "OtherType": ["field"]} +``` + + +### `add_field_dependencies` + +```python +def add_field_dependencies( + self, schema_id: int, type_name: str, field_name: str, query: str +): +``` + +Adds fields specified in `query` as dependencies for `field_name` of `type_name` that should be retrieved from schema with `schema_id`. + + +#### Required arguments + +- `schema_id`: an `int` with ID of schema returned by `add_remote_schema` or `add_schema`. +- `type_name`: a `str` with name of type for which dependencies will be set. +- `field_name`: a `str` with name of field whose dependencies will be set. +- `query`: a `str` with additional fields to fetch when `field_name` is included, e.g. `{ metadata { key value } }`. + + ### `add_foreign_key` ```python diff --git a/ariadne_graphql_proxy/__init__.py b/ariadne_graphql_proxy/__init__.py index b46767a..2961263 100644 --- a/ariadne_graphql_proxy/__init__.py +++ b/ariadne_graphql_proxy/__init__.py @@ -44,6 +44,7 @@ from .query_filter import QueryFilter, QueryFilterContext from .remote_schema import get_remote_schema from .resolvers import set_resolver, unset_resolver +from .selections import merge_selection_sets, merge_selections __all__ = [ "ForeignKeyResolver", @@ -84,6 +85,8 @@ "merge_objects", "merge_scalars", "merge_schemas", + "merge_selection_sets", + "merge_selections", "merge_type_maps", "merge_types", "merge_unions", diff --git a/ariadne_graphql_proxy/proxy_schema.py b/ariadne_graphql_proxy/proxy_schema.py index 827035c..dee515c 100644 --- a/ariadne_graphql_proxy/proxy_schema.py +++ b/ariadne_graphql_proxy/proxy_schema.py @@ -11,6 +11,10 @@ GraphQLSchema, GraphQLUnionType, GraphQLWrappingType, + OperationDefinitionNode, + OperationType, + SelectionSetNode, + parse, print_ast, ) from httpx import AsyncClient @@ -20,6 +24,7 @@ from .proxy_root_value import ProxyRootValue from .query_filter import 
QueryFilter from .remote_schema import get_remote_schema +from .selections import merge_selection_sets from .standard_types import STANDARD_TYPES, add_missing_scalar_types from .str_to_field import ( get_field_definition_from_str, @@ -46,6 +51,7 @@ def __init__( self.fields_types: Dict[str, Dict[str, str]] = {} self.unions: Dict[str, List[str]] = {} self.foreign_keys: Dict[str, Dict[str, List[str]]] = {} + self.dependencies: Dict[int, Dict[str, Dict[str, SelectionSetNode]]] = {} self.proxy_root_value = proxy_root_value @@ -176,8 +182,105 @@ def add_foreign_key( if field_name in self.foreign_keys[type_name]: raise ValueError(f"Foreign key already exists on {type_name}.{field_name}") + for schema_dependencies in self.dependencies.values(): + if ( + type_name in schema_dependencies + and field_name in schema_dependencies[type_name] + ): + raise ValueError( + f"Foreign key can't be created for {type_name}.{field_name} because " + "field dependencies were previously defined for it." + ) + self.foreign_keys[type_name][field_name] = [on] if isinstance(on, str) else on + def add_field_dependencies( + self, schema_id: int, type_name: str, field_name: str, query: str + ): + if type_name in ("Query", "Mutation", "Subscription"): + raise ValueError( + f"Defining field dependencies for {type_name} fields is not allowed." + ) + + if ( + type_name in self.foreign_keys + and field_name in self.foreign_keys[type_name] + ): + raise ValueError( + f"Dependencies can't be created for {type_name}.{field_name} because " + "foreign key was previously defined for it." + ) + + if schema_id < 0 or schema_id + 1 > len(self.urls): + raise ValueError(f"Schema with ID '{schema_id}' doesn't exist.") + if not self.urls[schema_id]: + raise ValueError(f"Schema with ID '{schema_id}' is not a remote schema.") + + schema = self.schemas[schema_id] + if type_name not in schema.type_map: + raise ValueError( + f"Type '{type_name}' doesn't exist in schema with ID '{schema_id}'." 
+ ) + + schema_type = schema.type_map[type_name] + if not isinstance(schema_type, GraphQLObjectType): + raise ValueError( + f"Type '{type_name}' in schema with ID '{schema_id}' is not " + "an object type." + ) + + self.validate_field_with_dependencies(type_name, field_name) + + if schema_id not in self.dependencies: + self.dependencies[schema_id] = {} + if type_name not in self.dependencies[schema_id]: + self.dependencies[schema_id][type_name] = {} + + selection_set = self.parse_field_dependencies(field_name, query) + + type_dependencies = self.dependencies[schema_id][type_name] + if not type_dependencies.get(field_name): + type_dependencies[field_name] = selection_set + else: + type_dependencies[field_name] = merge_selection_sets( + type_dependencies[field_name], selection_set + ) + + def parse_field_dependencies(self, field_name: str, query: str) -> SelectionSetNode: + clean_query = query.strip() + if not clean_query.startswith("{") or not clean_query.endswith("}"): + raise ValueError( + f"'{field_name}' field dependencies should be defined as a single " + "GraphQL operation, e.g.: '{ field other { subfield } }'." + ) + + ast = parse(clean_query) + + if ( + not len(ast.definitions) == 1 + or not isinstance(ast.definitions[0], OperationDefinitionNode) + or ast.definitions[0].operation != OperationType.QUERY + ): + raise ValueError( + f"'{field_name}' field dependencies should be defined as a single " + "GraphQL operation, e.g.: '{ field other { subfield } }'." + ) + + return ast.definitions[0].selection_set + + def validate_field_with_dependencies(self, type_name: str, field_name: str) -> None: + for schema in self.schemas: + if ( + type_name in schema.type_map + and isinstance(schema.type_map[type_name], GraphQLObjectType) + and field_name in schema.type_map[type_name].fields # type: ignore + ): + return + + raise ValueError( + f"Type '{type_name}' doesn't define the '{field_name}' field in any of schemas." 
+ ) + def add_delayed_fields(self, delayed_fields: Dict[str, List[str]]): for type_name, type_fields in delayed_fields.items(): if type_name not in self.fields_map: @@ -227,6 +330,7 @@ def get_final_schema(self) -> GraphQLSchema: self.fields_types, self.unions, self.foreign_keys, + self.dependencies, ) return self.schema diff --git a/ariadne_graphql_proxy/query_filter.py b/ariadne_graphql_proxy/query_filter.py index feb5f49..3751cac 100644 --- a/ariadne_graphql_proxy/query_filter.py +++ b/ariadne_graphql_proxy/query_filter.py @@ -14,6 +14,8 @@ VariableNode, ) +from .selections import merge_selections + class QueryFilterContext: schema_id: int @@ -35,6 +37,7 @@ def __init__( fields_types: Dict[str, Dict[str, str]], unions: Dict[str, List[str]], foreign_keys: Dict[str, Dict[str, List[str]]], + dependencies: Dict[int, Dict[str, Dict[str, SelectionSetNode]]], ): self.schema = schema self.schemas = schemas @@ -42,6 +45,7 @@ def __init__( self.fields_types = fields_types self.unions = unions self.foreign_keys = foreign_keys + self.dependencies = dependencies def split_query( self, document: DocumentNode @@ -189,12 +193,22 @@ def filter_field_node( else: type_fields = self.fields_map[type_name] + fields_dependencies = self.get_type_fields_dependencies( + context.schema_id, type_name + ) + new_selections: List[SelectionNode] = [] for selection in field_node.selection_set.selections: if isinstance(selection, FieldNode): + field_name = selection.name.value + if fields_dependencies and field_name in fields_dependencies: + new_selections = merge_selections( + new_selections, fields_dependencies[field_name].selections + ) + if ( - selection.name.value not in type_fields - or context.schema_id not in type_fields[selection.name.value] + field_name not in type_fields + or context.schema_id not in type_fields[field_name] ): continue @@ -244,12 +258,22 @@ def filter_inline_fragment_node( type_name = fragment_node.type_condition.name.value type_fields = self.fields_map[type_name] + 
fields_dependencies = self.get_type_fields_dependencies( + context.schema_id, type_name + ) + new_selections: List[SelectionNode] = [] for selection in fragment_node.selection_set.selections: if isinstance(selection, FieldNode): + field_name = selection.name.value + if fields_dependencies and field_name in fields_dependencies: + new_selections = merge_selections( + new_selections, fields_dependencies[field_name].selections + ) + if ( - selection.name.value not in type_fields - or context.schema_id not in type_fields[selection.name.value] + field_name not in type_fields + or context.schema_id not in type_fields[field_name] ): continue @@ -294,12 +318,22 @@ def filter_fragment_spread_node( type_name = fragment.type_condition.name.value type_fields = self.fields_map[type_name] + fields_dependencies = self.get_type_fields_dependencies( + context.schema_id, type_name + ) + new_selections: List[SelectionNode] = [] for selection in fragment.selection_set.selections: if isinstance(selection, FieldNode): + field_name = selection.name.value + if fields_dependencies and field_name in fields_dependencies: + new_selections = merge_selections( + new_selections, fields_dependencies[field_name].selections + ) + if ( - selection.name.value not in type_fields - or context.schema_id not in type_fields[selection.name.value] + field_name not in type_fields + or context.schema_id not in type_fields[field_name] ): continue @@ -347,3 +381,13 @@ def inline_fragment_spread_node( selections=tuple(selections), ), ) + + def get_type_fields_dependencies( + self, + schema_id: int, + type_name: str, + ) -> Optional[Dict[str, SelectionSetNode]]: + if schema_id in self.dependencies and type_name in self.dependencies[schema_id]: + return self.dependencies[schema_id][type_name] + + return None diff --git a/ariadne_graphql_proxy/selections.py b/ariadne_graphql_proxy/selections.py new file mode 100644 index 0000000..875d7aa --- /dev/null +++ b/ariadne_graphql_proxy/selections.py @@ -0,0 +1,53 @@ +from 
typing import Dict, Sequence, List, cast + +from graphql import FieldNode, SelectionNode, SelectionSetNode + + +def merge_selection_sets( + set_a: SelectionSetNode, set_b: SelectionSetNode +) -> SelectionSetNode: + return SelectionSetNode( + selections=tuple(merge_selections(set_a.selections, set_b.selections)), + ) + + +def merge_selections( + set_a: Sequence[SelectionNode], set_b: Sequence[SelectionNode] +) -> List[SelectionNode]: + final_set: List[SelectionNode] = list(set_a) + + index: Dict[str, int] = {} + for i, field in enumerate(final_set): + if isinstance(field, FieldNode): + index[(field.alias or field.name).value] = i + + for field in set_b: + if isinstance(field, FieldNode): + field_name = (field.alias or field.name).value + if field_name in index: + field_index = index[field_name] + other_field = cast(FieldNode, final_set[field_index]) + if other_field.selection_set and field.selection_set: + final_set[field_index] = FieldNode( + directives=other_field.directives, + alias=other_field.alias, + name=field.name, + arguments=other_field.arguments, + selection_set=merge_selection_sets( + other_field.selection_set, field.selection_set + ), + ) + elif other_field.selection_set or field.selection_set: + final_set[field_index] = FieldNode( + directives=other_field.directives, + alias=other_field.alias, + name=field.name, + arguments=other_field.arguments, + selection_set=( + other_field.selection_set or field.selection_set + ), + ) + else: + final_set.append(field) + + return final_set diff --git a/tests/test_merge_selection_sets.py b/tests/test_merge_selection_sets.py new file mode 100644 index 0000000..c091cb4 --- /dev/null +++ b/tests/test_merge_selection_sets.py @@ -0,0 +1,82 @@ +from textwrap import dedent + +from graphql import parse, print_ast + +from ariadne_graphql_proxy import merge_selection_sets + + +def test_merge_selection_sets_merges_two_flat_sets(): + set_a = parse("{ hello }").definitions[0].selection_set + set_b = parse("{ world 
}").definitions[0].selection_set + + result = merge_selection_sets(set_a, set_b) + assert ( + print_ast(result) + == dedent( + """ + { + hello + world + } + """ + ).strip() + ) + + +def test_merge_selection_sets_merges_two_overlapping_flat_sets(): + set_a = parse("{ hello world }").definitions[0].selection_set + set_b = parse("{ world }").definitions[0].selection_set + + result = merge_selection_sets(set_a, set_b) + assert ( + print_ast(result) + == dedent( + """ + { + hello + world + } + """ + ).strip() + ) + + +def test_merge_selection_sets_keeps_nested_selections(): + set_a = parse("{ hello { sub } }").definitions[0].selection_set + set_b = parse("{ world }").definitions[0].selection_set + + result = merge_selection_sets(set_a, set_b) + assert ( + print_ast(result) + == dedent( + """ + { + hello { + sub + } + world + } + """ + ).strip() + ) + + +def test_merge_selection_sets_merges_selection_sets_recursively(): + set_a = parse("{ hello { sub } }").definitions[0].selection_set + set_b = parse("{ hello { set } world }").definitions[0].selection_set + + result = merge_selection_sets(set_a, set_b) + assert ( + print_ast(result) + == dedent( + """ + { + hello { + sub + set + } + world + } + """ + ).strip() + ) diff --git a/tests/test_merge_selections.py b/tests/test_merge_selections.py new file mode 100644 index 0000000..cfe6253 --- /dev/null +++ b/tests/test_merge_selections.py @@ -0,0 +1,82 @@ +from textwrap import dedent + +from graphql import SelectionSetNode, parse, print_ast + +from ariadne_graphql_proxy import merge_selections + + +def test_merge_selections_merges_two_flat_sets(): + set_a = parse("{ hello }").definitions[0].selection_set.selections + set_b = parse("{ world }").definitions[0].selection_set.selections + + result = merge_selections(set_a, set_b) + assert ( + print_ast(SelectionSetNode(selections=result)) + == dedent( + """ + { + hello + world + } + """ + ).strip() + ) + + +def test_merge_selections_merges_two_overlapping_flat_sets(): + set_a = 
parse("{ hello world }").definitions[0].selection_set.selections + set_b = parse("{ world }").definitions[0].selection_set.selections + + result = merge_selections(set_a, set_b) + assert ( + print_ast(SelectionSetNode(selections=result)) + == dedent( + """ + { + hello + world + } + """ + ).strip() + ) + + +def test_merge_selections_keeps_nested_selections(): + set_a = parse("{ hello { sub } }").definitions[0].selection_set.selections + set_b = parse("{ world }").definitions[0].selection_set.selections + + result = merge_selections(set_a, set_b) + assert ( + print_ast(SelectionSetNode(selections=result)) + == dedent( + """ + { + hello { + sub + } + world + } + """ + ).strip() + ) + + +def test_merge_selections_merges_selection_sets_recursively(): + set_a = parse("{ hello { sub } }").definitions[0].selection_set.selections + set_b = parse("{ hello { set } world }").definitions[0].selection_set.selections + + result = merge_selections(set_a, set_b) + assert ( + print_ast(SelectionSetNode(selections=result)) + == dedent( + """ + { + hello { + sub + set + } + world + } + """ + ).strip() + ) diff --git a/tests/test_proxy_schema.py b/tests/test_proxy_schema.py index 3d24bd7..068b282 100644 --- a/tests/test_proxy_schema.py +++ b/tests/test_proxy_schema.py @@ -1263,3 +1263,233 @@ async def test_root_value_for_remote_schema_excludes_extensions( "name": "Test", }, } + + +@pytest.mark.asyncio +async def test_add_field_dependencies_for_nonexisting_schema_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id + 1, "Complex", "invalid", "{ group { name } }" + ) + + assert "Schema with ID '1' doesn't exist." 
== str(exc_info.value) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_for_local_schema_raises_error(schema): + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_schema(schema) + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id, "Complex", "invalid", "{ group { name } }" + ) + + assert "Schema with ID '0' is not a remote schema." == str(exc_info.value) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_for_query_field_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies(schema_id, "Query", "basic", "{ complex }") + + assert "Defining field dependencies for Query fields is not allowed." == str( + exc_info.value + ) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_for_mutation_field_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id, "Mutation", "basic", "{ complex }" + ) + + assert "Defining field dependencies for Mutation fields is not allowed." == str( + exc_info.value + ) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_for_subscription_field_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id, "Subscription", "basic", "{ complex }" + ) + + assert "Defining field dependencies for Subscription fields is not allowed." 
== str( + exc_info.value + ) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_for_nonexisting_type_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id, "Invalid", "basic", "{ complex }" + ) + + assert "Type 'Invalid' doesn't exist in schema with ID '0'." == str(exc_info.value) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_for_invalid_type_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id, "InputType", "invalid", "{ group { name } }" + ) + + assert "Type 'InputType' in schema with ID '0' is not an object type." == str( + exc_info.value + ) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_for_nonexisting_type_field_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id, "Complex", "invalid", "{ group { name } }" + ) + + assert ( + "Type 'Complex' doesn't define the 'invalid' field in any of schemas." 
+ == str(exc_info.value) + ) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_with_invalid_dependencies_arg_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(url="http://graphql.example.com/", json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id, "Complex", "class", "group { id }" + ) + + assert ( + "'class' field dependencies should be defined as a single GraphQL " + "operation, e.g.: '{ field other { subfield } }'." + ) == str(exc_info.value) + + +@pytest.mark.asyncio +async def test_add_field_dependencies_with_invalid_dependencies_arg_op_raises_error( + httpx_mock, schema_json +): + httpx_mock.add_response(url="http://graphql.example.com/", json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + with pytest.raises(ValueError) as exc_info: + proxy_schema.add_field_dependencies( + schema_id, "Complex", "class", "mutation { group { id } }" + ) + + assert ( + "'class' field dependencies should be defined as a single GraphQL " + "operation, e.g.: '{ field other { subfield } }'." 
+ ) == str(exc_info.value) + + +@pytest.mark.asyncio +async def test_root_value_for_remote_schema_includes_field_dependencies( + httpx_mock, schema_json +): + httpx_mock.add_response(url="http://graphql.example.com/", json=schema_json) + + proxy_schema = ProxySchema() + schema_id = proxy_schema.add_remote_schema("http://graphql.example.com/") + + proxy_schema.add_field_dependencies( + schema_id, "Complex", "class", "{ group { id } }" + ) + + proxy_schema.get_final_schema() + + await proxy_schema.root_resolver( + {}, + "TestQuery", + {}, + parse( + """ + query TestQuery { + complex { + class + } + } + """ + ), + ) + + request = httpx_mock.get_requests(url="http://graphql.example.com/")[-1] + assert json.loads(request.content) == { + "operationName": "TestQuery", + "variables": None, + "query": dedent( + """ + query TestQuery { + complex { + group { + id + } + class + } + } + """ + ).strip(), + }