From ec829a26604337377320d36b2780111bae50022c Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 1 Aug 2018 23:06:12 +0200 Subject: [PATCH] Start the repository for GraphQL-core-next Copied everything from github.com/Cito to github.com/graphql-python. --- .editorconfig | 21 + .flake8 | 2 + .gitignore | 18 + .mypy.ini | 2 + .travis.yml | 23 + LICENSE | 23 + MANIFEST.in | 13 + Makefile | 88 ++ Pipfile | 17 + Pipfile.lock | 367 +++++ README.md | 216 +++ docs/Makefile | 225 +++ docs/conf.py | 334 ++++ docs/index.rst | 23 + docs/intro.rst | 95 ++ docs/make.bat | 281 ++++ docs/modules/error.rst | 15 + docs/modules/execution.rst | 16 + docs/modules/graphql.rst | 29 + docs/modules/language.rst | 104 ++ docs/modules/pyutils.rst | 20 + docs/modules/subscription.rst | 8 + docs/modules/type.rst | 191 +++ docs/modules/utilities.rst | 102 ++ docs/modules/validation.rst | 122 ++ docs/usage/extension.rst | 47 + docs/usage/index.rst | 18 + docs/usage/introspection.rst | 63 + docs/usage/other.rst | 10 + docs/usage/parser.rst | 70 + docs/usage/queries.rst | 130 ++ docs/usage/resolvers.rst | 99 ++ docs/usage/schema.rst | 195 +++ docs/usage/sdl.rst | 83 + docs/usage/validator.rst | 41 + graphql/__init__.py | 440 ++++++ graphql/error/__init__.py | 16 + graphql/error/format_error.py | 23 + graphql/error/graphql_error.py | 142 ++ graphql/error/invalid.py | 24 + graphql/error/located_error.py | 45 + graphql/error/print_error.py | 78 + graphql/error/syntax_error.py | 12 + graphql/execution/__init__.py | 15 + graphql/execution/execute.py | 951 ++++++++++++ graphql/execution/values.py | 184 +++ graphql/graphql.py | 147 ++ graphql/language/__init__.py | 73 + graphql/language/ast.py | 465 ++++++ graphql/language/block_string_value.py | 41 + graphql/language/directive_locations.py | 29 + graphql/language/lexer.py | 446 ++++++ graphql/language/location.py | 21 + graphql/language/parser.py | 969 ++++++++++++ graphql/language/printer.py | 279 ++++ graphql/language/source.py | 47 + graphql/language/visitor.py | 378 +++++ graphql/pyutils/__init__.py | 30 + graphql/pyutils/cached_property.py | 24 + graphql/pyutils/contain_subset.py | 34 + graphql/pyutils/convert_case.py | 25 + graphql/pyutils/dedent.py | 12 + graphql/pyutils/event_emitter.py | 65 + graphql/pyutils/is_finite.py | 10 + graphql/pyutils/is_integer.py | 10 + graphql/pyutils/is_invalid.py | 10 + graphql/pyutils/is_nullish.py | 10 + graphql/pyutils/maybe_awaitable.py | 8 + graphql/pyutils/or_list.py | 16 + graphql/pyutils/quoted_or_list.py | 13 + graphql/pyutils/suggestion_list.py | 62 + graphql/subscription/__init__.py | 9 + graphql/subscription/map_async_iterator.py | 73 + graphql/subscription/subscribe.py | 155 ++ graphql/type/__init__.py | 115 ++ graphql/type/definition.py | 1204 ++++++++++++++ graphql/type/directives.py | 135 ++ graphql/type/introspection.py | 411 +++++ graphql/type/scalars.py | 233 +++ graphql/type/schema.py | 226 +++ graphql/type/validate.py | 546 +++++++ graphql/utilities/__init__.py | 91 ++ graphql/utilities/assert_valid_name.py | 34 + graphql/utilities/ast_from_value.py | 110 ++ graphql/utilities/build_ast_schema.py | 381 +++++ graphql/utilities/build_client_schema.py | 274 ++++ graphql/utilities/coerce_value.py | 178 +++ graphql/utilities/concat_ast.py | 17 + graphql/utilities/extend_schema.py | 491 ++++++ graphql/utilities/find_breaking_changes.py | 695 +++++++++ graphql/utilities/find_deprecated_usages.py | 55 + graphql/utilities/get_operation_ast.py | 29 + graphql/utilities/get_operation_root_type.py | 39 + 
.../utilities/introspection_from_schema.py | 34 + graphql/utilities/introspection_query.py | 100 ++ .../utilities/lexicographic_sort_schema.py | 142 ++ graphql/utilities/schema_printer.py | 286 ++++ graphql/utilities/separate_operations.py | 98 ++ graphql/utilities/type_comparators.py | 112 ++ graphql/utilities/type_from_ast.py | 52 + graphql/utilities/type_info.py | 247 +++ graphql/utilities/value_from_ast.py | 146 ++ graphql/utilities/value_from_ast_untyped.py | 84 + graphql/validation/__init__.py | 107 ++ graphql/validation/rules/__init__.py | 16 + .../rules/executable_definitions.py | 30 + .../rules/fields_on_correct_type.py | 107 ++ .../rules/fragments_on_composite_types.py | 48 + .../validation/rules/known_argument_names.py | 66 + graphql/validation/rules/known_directives.py | 85 + .../validation/rules/known_fragment_names.py | 23 + graphql/validation/rules/known_type_names.py | 43 + .../rules/lone_anonymous_operation.py | 33 + .../validation/rules/no_fragment_cycles.py | 74 + .../rules/no_undefined_variables.py | 38 + .../validation/rules/no_unused_fragments.py | 43 + .../validation/rules/no_unused_variables.py | 41 + .../rules/overlapping_fields_can_be_merged.py | 750 +++++++++ .../rules/possible_fragment_spreads.py | 60 + .../rules/provided_required_arguments.py | 57 + graphql/validation/rules/scalar_leafs.py | 43 + .../rules/single_field_subscriptions.py | 27 + .../validation/rules/unique_argument_names.py | 37 + .../rules/unique_directives_per_location.py | 36 + .../validation/rules/unique_fragment_names.py | 34 + .../rules/unique_input_field_names.py | 38 + .../rules/unique_operation_names.py | 36 + .../validation/rules/unique_variable_names.py | 32 + .../rules/values_of_correct_type.py | 145 ++ .../rules/variables_are_input_types.py | 30 + .../rules/variables_in_allowed_position.py | 80 + graphql/validation/specified_rules.py | 119 ++ graphql/validation/validate.py | 53 + graphql/validation/validation_context.py | 174 +++ pytest.ini | 2 + setup.cfg | 20 + setup.py | 42 + tests/__init__.py | 1 + tests/error/__init__.py | 1 + tests/error/test_graphql_error.py | 102 ++ tests/error/test_located_error.py | 24 + tests/error/test_print_error.py | 70 + tests/execution/__init__.py | 1 + tests/execution/test_abstract.py | 268 ++++ tests/execution/test_abstract_async.py | 300 ++++ tests/execution/test_directives.py | 220 +++ tests/execution/test_executor.py | 675 ++++++++ tests/execution/test_lists.py | 367 +++++ tests/execution/test_mutations.py | 158 ++ tests/execution/test_nonnull.py | 509 ++++++ tests/execution/test_resolve.py | 85 + tests/execution/test_schema.py | 145 ++ tests/execution/test_sync.py | 82 + tests/execution/test_union_interface.py | 294 ++++ tests/execution/test_variables.py | 717 +++++++++ tests/language/__init__.py | 20 + tests/language/kitchen_sink.graphql | 59 + tests/language/schema_kitchen_sink.graphql | 131 ++ tests/language/test_ast.py | 51 + tests/language/test_block_string_value.py | 73 + tests/language/test_lexer.py | 298 ++++ tests/language/test_parser.py | 435 ++++++ tests/language/test_printer.py | 162 ++ tests/language/test_schema_parser.py | 447 ++++++ tests/language/test_schema_printer.py | 160 ++ tests/language/test_visitor.py | 1348 ++++++++++++++++ tests/pyutils/__init__.py | 1 + tests/pyutils/test_cached_property.py | 31 + tests/pyutils/test_contain_subset.py | 140 ++ tests/pyutils/test_convert_case.py | 53 + tests/pyutils/test_dedent.py | 70 + tests/pyutils/test_event_emitter.py | 103 ++ tests/pyutils/test_is_finite.py | 42 + 
tests/pyutils/test_is_integer.py | 70 + tests/pyutils/test_is_invalid.py | 32 + tests/pyutils/test_is_nullish.py | 32 + tests/pyutils/test_or_list.py | 29 + tests/pyutils/test_quoted_or_list.py | 23 + tests/pyutils/test_suggesion_list.py | 22 + tests/star_wars_data.py | 139 ++ tests/star_wars_schema.py | 206 +++ tests/subscription/__init__.py | 1 + tests/subscription/test_map_async_iterator.py | 173 +++ tests/subscription/test_subscribe.py | 626 ++++++++ tests/test_star_wars_introspection.py | 367 +++++ tests/test_star_wars_query.py | 422 +++++ tests/test_star_wars_validation.py | 108 ++ tests/type/__init__.py | 1 + tests/type/test_definition.py | 821 ++++++++++ tests/type/test_enum.py | 251 +++ tests/type/test_introspection.py | 1175 ++++++++++++++ tests/type/test_predicate.py | 372 +++++ tests/type/test_schema.py | 59 + tests/type/test_serialization.py | 210 +++ tests/type/test_validation.py | 1377 +++++++++++++++++ tests/utilities/__init__.py | 1 + tests/utilities/test_assert_valid_name.py | 28 + tests/utilities/test_ast_from_value.py | 182 +++ tests/utilities/test_build_ast_schema.py | 936 +++++++++++ tests/utilities/test_build_client_schema.py | 435 ++++++ tests/utilities/test_coerce_value.py | 231 +++ tests/utilities/test_concat_ast.py | 33 + tests/utilities/test_extend_schema.py | 1241 +++++++++++++++ tests/utilities/test_find_breaking_changes.py | 1033 +++++++++++++ .../utilities/test_find_deprecated_usages.py | 43 + tests/utilities/test_get_operation_ast.py | 55 + .../utilities/test_get_operation_root_type.py | 111 ++ .../test_introspection_from_schema.py | 45 + .../test_lexicographic_sort_schema.py | 345 +++++ tests/utilities/test_schema_printer.py | 734 +++++++++ tests/utilities/test_separate_operations.py | 158 ++ tests/utilities/test_type_comparators.py | 82 + tests/utilities/test_value_from_ast.py | 172 ++ .../utilities/test_value_from_ast_untyped.py | 49 + tests/validation/__init__.py | 1 + tests/validation/harness.py | 260 ++++ .../validation/test_executable_definitions.py | 75 + .../validation/test_fields_on_correct_type.py | 226 +++ .../test_fragments_on_composite_types.py | 95 ++ tests/validation/test_known_argument_names.py | 146 ++ tests/validation/test_known_directives.py | 199 +++ tests/validation/test_known_fragment_names.py | 59 + tests/validation/test_known_type_names.py | 63 + .../test_lone_anonymous_operation.py | 86 + tests/validation/test_no_fragment_cycles.py | 172 ++ .../validation/test_no_undefined_variables.py | 269 ++++ tests/validation/test_no_unused_fragments.py | 142 ++ tests/validation/test_no_unused_variables.py | 193 +++ .../test_overlapping_fields_can_be_merged.py | 827 ++++++++++ .../test_possible_fragment_spreads.py | 182 +++ .../test_provided_required_arguments.py | 196 +++ tests/validation/test_scalar_leafs.py | 97 ++ .../test_single_field_subscriptions.py | 60 + .../validation/test_unique_argument_names.py | 116 ++ .../test_unique_directives_per_location.py | 80 + .../validation/test_unique_fragment_names.py | 98 ++ .../test_unique_input_field_names.py | 69 + .../validation/test_unique_operation_names.py | 107 ++ .../validation/test_unique_variable_names.py | 32 + tests/validation/test_validation.py | 68 + .../validation/test_values_of_correct_type.py | 884 +++++++++++ .../test_variables_are_input_types.py | 31 + .../test_variables_in_allowed_position.py | 280 ++++ tox.ini | 30 + 244 files changed, 42852 insertions(+) create mode 100644 .editorconfig create mode 100644 .flake8 create mode 100644 .gitignore create mode 100644 .mypy.ini create 
mode 100644 .travis.yml create mode 100644 LICENSE create mode 100644 MANIFEST.in create mode 100644 Makefile create mode 100644 Pipfile create mode 100644 Pipfile.lock create mode 100644 README.md create mode 100644 docs/Makefile create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/intro.rst create mode 100644 docs/make.bat create mode 100644 docs/modules/error.rst create mode 100644 docs/modules/execution.rst create mode 100644 docs/modules/graphql.rst create mode 100644 docs/modules/language.rst create mode 100644 docs/modules/pyutils.rst create mode 100644 docs/modules/subscription.rst create mode 100644 docs/modules/type.rst create mode 100644 docs/modules/utilities.rst create mode 100644 docs/modules/validation.rst create mode 100644 docs/usage/extension.rst create mode 100644 docs/usage/index.rst create mode 100644 docs/usage/introspection.rst create mode 100644 docs/usage/other.rst create mode 100644 docs/usage/parser.rst create mode 100644 docs/usage/queries.rst create mode 100644 docs/usage/resolvers.rst create mode 100644 docs/usage/schema.rst create mode 100644 docs/usage/sdl.rst create mode 100644 docs/usage/validator.rst create mode 100644 graphql/__init__.py create mode 100644 graphql/error/__init__.py create mode 100644 graphql/error/format_error.py create mode 100644 graphql/error/graphql_error.py create mode 100644 graphql/error/invalid.py create mode 100644 graphql/error/located_error.py create mode 100644 graphql/error/print_error.py create mode 100644 graphql/error/syntax_error.py create mode 100644 graphql/execution/__init__.py create mode 100644 graphql/execution/execute.py create mode 100644 graphql/execution/values.py create mode 100644 graphql/graphql.py create mode 100644 graphql/language/__init__.py create mode 100644 graphql/language/ast.py create mode 100644 graphql/language/block_string_value.py create mode 100644 graphql/language/directive_locations.py create mode 100644 graphql/language/lexer.py create mode 100644 graphql/language/location.py create mode 100644 graphql/language/parser.py create mode 100644 graphql/language/printer.py create mode 100644 graphql/language/source.py create mode 100644 graphql/language/visitor.py create mode 100644 graphql/pyutils/__init__.py create mode 100644 graphql/pyutils/cached_property.py create mode 100644 graphql/pyutils/contain_subset.py create mode 100644 graphql/pyutils/convert_case.py create mode 100644 graphql/pyutils/dedent.py create mode 100644 graphql/pyutils/event_emitter.py create mode 100644 graphql/pyutils/is_finite.py create mode 100644 graphql/pyutils/is_integer.py create mode 100644 graphql/pyutils/is_invalid.py create mode 100644 graphql/pyutils/is_nullish.py create mode 100644 graphql/pyutils/maybe_awaitable.py create mode 100644 graphql/pyutils/or_list.py create mode 100644 graphql/pyutils/quoted_or_list.py create mode 100644 graphql/pyutils/suggestion_list.py create mode 100644 graphql/subscription/__init__.py create mode 100644 graphql/subscription/map_async_iterator.py create mode 100644 graphql/subscription/subscribe.py create mode 100644 graphql/type/__init__.py create mode 100644 graphql/type/definition.py create mode 100644 graphql/type/directives.py create mode 100644 graphql/type/introspection.py create mode 100644 graphql/type/scalars.py create mode 100644 graphql/type/schema.py create mode 100644 graphql/type/validate.py create mode 100644 graphql/utilities/__init__.py create mode 100644 graphql/utilities/assert_valid_name.py create mode 100644 
graphql/utilities/ast_from_value.py create mode 100644 graphql/utilities/build_ast_schema.py create mode 100644 graphql/utilities/build_client_schema.py create mode 100644 graphql/utilities/coerce_value.py create mode 100644 graphql/utilities/concat_ast.py create mode 100644 graphql/utilities/extend_schema.py create mode 100644 graphql/utilities/find_breaking_changes.py create mode 100644 graphql/utilities/find_deprecated_usages.py create mode 100644 graphql/utilities/get_operation_ast.py create mode 100644 graphql/utilities/get_operation_root_type.py create mode 100644 graphql/utilities/introspection_from_schema.py create mode 100644 graphql/utilities/introspection_query.py create mode 100644 graphql/utilities/lexicographic_sort_schema.py create mode 100644 graphql/utilities/schema_printer.py create mode 100644 graphql/utilities/separate_operations.py create mode 100644 graphql/utilities/type_comparators.py create mode 100644 graphql/utilities/type_from_ast.py create mode 100644 graphql/utilities/type_info.py create mode 100644 graphql/utilities/value_from_ast.py create mode 100644 graphql/utilities/value_from_ast_untyped.py create mode 100644 graphql/validation/__init__.py create mode 100644 graphql/validation/rules/__init__.py create mode 100644 graphql/validation/rules/executable_definitions.py create mode 100644 graphql/validation/rules/fields_on_correct_type.py create mode 100644 graphql/validation/rules/fragments_on_composite_types.py create mode 100644 graphql/validation/rules/known_argument_names.py create mode 100644 graphql/validation/rules/known_directives.py create mode 100644 graphql/validation/rules/known_fragment_names.py create mode 100644 graphql/validation/rules/known_type_names.py create mode 100644 graphql/validation/rules/lone_anonymous_operation.py create mode 100644 graphql/validation/rules/no_fragment_cycles.py create mode 100644 graphql/validation/rules/no_undefined_variables.py create mode 100644 graphql/validation/rules/no_unused_fragments.py create mode 100644 graphql/validation/rules/no_unused_variables.py create mode 100644 graphql/validation/rules/overlapping_fields_can_be_merged.py create mode 100644 graphql/validation/rules/possible_fragment_spreads.py create mode 100644 graphql/validation/rules/provided_required_arguments.py create mode 100644 graphql/validation/rules/scalar_leafs.py create mode 100644 graphql/validation/rules/single_field_subscriptions.py create mode 100644 graphql/validation/rules/unique_argument_names.py create mode 100644 graphql/validation/rules/unique_directives_per_location.py create mode 100644 graphql/validation/rules/unique_fragment_names.py create mode 100644 graphql/validation/rules/unique_input_field_names.py create mode 100644 graphql/validation/rules/unique_operation_names.py create mode 100644 graphql/validation/rules/unique_variable_names.py create mode 100644 graphql/validation/rules/values_of_correct_type.py create mode 100644 graphql/validation/rules/variables_are_input_types.py create mode 100644 graphql/validation/rules/variables_in_allowed_position.py create mode 100644 graphql/validation/specified_rules.py create mode 100644 graphql/validation/validate.py create mode 100644 graphql/validation/validation_context.py create mode 100644 pytest.ini create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 tests/__init__.py create mode 100644 tests/error/__init__.py create mode 100644 tests/error/test_graphql_error.py create mode 100644 tests/error/test_located_error.py create mode 100644 
tests/error/test_print_error.py create mode 100644 tests/execution/__init__.py create mode 100644 tests/execution/test_abstract.py create mode 100644 tests/execution/test_abstract_async.py create mode 100644 tests/execution/test_directives.py create mode 100644 tests/execution/test_executor.py create mode 100644 tests/execution/test_lists.py create mode 100644 tests/execution/test_mutations.py create mode 100644 tests/execution/test_nonnull.py create mode 100644 tests/execution/test_resolve.py create mode 100644 tests/execution/test_schema.py create mode 100644 tests/execution/test_sync.py create mode 100644 tests/execution/test_union_interface.py create mode 100644 tests/execution/test_variables.py create mode 100644 tests/language/__init__.py create mode 100644 tests/language/kitchen_sink.graphql create mode 100644 tests/language/schema_kitchen_sink.graphql create mode 100644 tests/language/test_ast.py create mode 100644 tests/language/test_block_string_value.py create mode 100644 tests/language/test_lexer.py create mode 100644 tests/language/test_parser.py create mode 100644 tests/language/test_printer.py create mode 100644 tests/language/test_schema_parser.py create mode 100644 tests/language/test_schema_printer.py create mode 100644 tests/language/test_visitor.py create mode 100644 tests/pyutils/__init__.py create mode 100644 tests/pyutils/test_cached_property.py create mode 100644 tests/pyutils/test_contain_subset.py create mode 100644 tests/pyutils/test_convert_case.py create mode 100644 tests/pyutils/test_dedent.py create mode 100644 tests/pyutils/test_event_emitter.py create mode 100644 tests/pyutils/test_is_finite.py create mode 100644 tests/pyutils/test_is_integer.py create mode 100644 tests/pyutils/test_is_invalid.py create mode 100644 tests/pyutils/test_is_nullish.py create mode 100644 tests/pyutils/test_or_list.py create mode 100644 tests/pyutils/test_quoted_or_list.py create mode 100644 tests/pyutils/test_suggesion_list.py create mode 100644 tests/star_wars_data.py create mode 100644 tests/star_wars_schema.py create mode 100644 tests/subscription/__init__.py create mode 100644 tests/subscription/test_map_async_iterator.py create mode 100644 tests/subscription/test_subscribe.py create mode 100644 tests/test_star_wars_introspection.py create mode 100644 tests/test_star_wars_query.py create mode 100644 tests/test_star_wars_validation.py create mode 100644 tests/type/__init__.py create mode 100644 tests/type/test_definition.py create mode 100644 tests/type/test_enum.py create mode 100644 tests/type/test_introspection.py create mode 100644 tests/type/test_predicate.py create mode 100644 tests/type/test_schema.py create mode 100644 tests/type/test_serialization.py create mode 100644 tests/type/test_validation.py create mode 100644 tests/utilities/__init__.py create mode 100644 tests/utilities/test_assert_valid_name.py create mode 100644 tests/utilities/test_ast_from_value.py create mode 100644 tests/utilities/test_build_ast_schema.py create mode 100644 tests/utilities/test_build_client_schema.py create mode 100644 tests/utilities/test_coerce_value.py create mode 100644 tests/utilities/test_concat_ast.py create mode 100644 tests/utilities/test_extend_schema.py create mode 100644 tests/utilities/test_find_breaking_changes.py create mode 100644 tests/utilities/test_find_deprecated_usages.py create mode 100644 tests/utilities/test_get_operation_ast.py create mode 100644 tests/utilities/test_get_operation_root_type.py create mode 100644 tests/utilities/test_introspection_from_schema.py 
create mode 100644 tests/utilities/test_lexicographic_sort_schema.py create mode 100644 tests/utilities/test_schema_printer.py create mode 100644 tests/utilities/test_separate_operations.py create mode 100644 tests/utilities/test_type_comparators.py create mode 100644 tests/utilities/test_value_from_ast.py create mode 100644 tests/utilities/test_value_from_ast_untyped.py create mode 100644 tests/validation/__init__.py create mode 100644 tests/validation/harness.py create mode 100644 tests/validation/test_executable_definitions.py create mode 100644 tests/validation/test_fields_on_correct_type.py create mode 100644 tests/validation/test_fragments_on_composite_types.py create mode 100644 tests/validation/test_known_argument_names.py create mode 100644 tests/validation/test_known_directives.py create mode 100644 tests/validation/test_known_fragment_names.py create mode 100644 tests/validation/test_known_type_names.py create mode 100644 tests/validation/test_lone_anonymous_operation.py create mode 100644 tests/validation/test_no_fragment_cycles.py create mode 100644 tests/validation/test_no_undefined_variables.py create mode 100644 tests/validation/test_no_unused_fragments.py create mode 100644 tests/validation/test_no_unused_variables.py create mode 100644 tests/validation/test_overlapping_fields_can_be_merged.py create mode 100644 tests/validation/test_possible_fragment_spreads.py create mode 100644 tests/validation/test_provided_required_arguments.py create mode 100644 tests/validation/test_scalar_leafs.py create mode 100644 tests/validation/test_single_field_subscriptions.py create mode 100644 tests/validation/test_unique_argument_names.py create mode 100644 tests/validation/test_unique_directives_per_location.py create mode 100644 tests/validation/test_unique_fragment_names.py create mode 100644 tests/validation/test_unique_input_field_names.py create mode 100644 tests/validation/test_unique_operation_names.py create mode 100644 tests/validation/test_unique_variable_names.py create mode 100644 tests/validation/test_validation.py create mode 100644 tests/validation/test_values_of_correct_type.py create mode 100644 tests/validation/test_variables_are_input_types.py create mode 100644 tests/validation/test_variables_in_allowed_position.py create mode 100644 tox.ini diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..d4a2c440 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,21 @@ +# http://editorconfig.org + +root = true + +[*] +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true +charset = utf-8 +end_of_line = lf + +[*.bat] +indent_style = tab +end_of_line = crlf + +[LICENSE] +insert_final_newline = false + +[Makefile] +indent_style = tab diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000..5960dc31 --- /dev/null +++ b/.flake8 @@ -0,0 +1,2 @@ +[flake8] +exclude = .git,.mypy_cache,.pytest_cache,.tox,.venv,__pycache__,build,dist,docs diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..aea15f7d --- /dev/null +++ b/.gitignore @@ -0,0 +1,18 @@ + +.cache +.coverage +.env +.idea +.mypy_cache +.pytest_cache +.tox +.venv + +build +dist +docs/_build + +__pycache__ + +*.egg-info +*.py[cod] diff --git a/.mypy.ini b/.mypy.ini new file mode 100644 index 00000000..b3d7f6e1 --- /dev/null +++ b/.mypy.ini @@ -0,0 +1,2 @@ +[mypy] +python_version = 3.6 diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..f6e1f3b8 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,23 @@ +language: python + 
+python: + - 3.6 +# - 3.7 done in the matrix below + +install: + - pip install pipenv + - pipenv install --dev + +matrix: + include: + - python: 3.7 + dist: xenial # required for Python 3.7, + sudo: true # see travis-ci/travis-ci#9069 + +script: + - flake8 graphql tests + - mypy graphql + - pytest --cov-report term-missing --cov=graphql + +after_success: + - coveralls diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..33973268 --- /dev/null +++ b/LICENSE @@ -0,0 +1,23 @@ +MIT License + +Copyright (c) 2017-2018 Facebook, Inc. (GraphQL.js) +Copyright (c) 2016 Syrus Akbary (GraphQL-core) +Copyright (c) 2018 Christoph Zwerschke (GraphQL-core-next) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..ed1bac96 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,13 @@ +include LICENSE +include README.md +include Makefile +include Pipfile +include tox.ini + +recursive-include graphql * +recursive-include tests * +recursive-exclude * __pycache__ +recursive-exclude * .mypy_cache +recursive-exclude * *.py[co] + +recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..227a1caa --- /dev/null +++ b/Makefile @@ -0,0 +1,88 @@ +.PHONY: clean clean-test clean-pyc clean-build docs help +.DEFAULT_GOAL := help + +define BROWSER_PYSCRIPT +import os, webbrowser, sys + +try: + from urllib import pathname2url +except: + from urllib.request import pathname2url + +webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) +endef +export BROWSER_PYSCRIPT + +define PRINT_HELP_PYSCRIPT +import re, sys + +for line in sys.stdin: + match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) + if match: + target, help = match.groups() + print("%-20s %s" % (target, help)) +endef +export PRINT_HELP_PYSCRIPT + +BROWSER := python -c "$$BROWSER_PYSCRIPT" + +help: + @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) + +clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts + +clean-build: ## remove build artifacts + rm -fr build/ + rm -fr dist/ + rm -fr .eggs/ + find . -name '*.egg-info' -exec rm -fr {} + + find . -name '*.egg' -exec rm -f {} + + +clean-pyc: ## remove Python file artifacts + find . -name '*.pyc' -exec rm -f {} + + find . -name '*.pyo' -exec rm -f {} + + find . -name '*~' -exec rm -f {} + + find . 
-name '__pycache__' -exec rm -fr {} + + +clean-test: ## remove test and coverage artifacts + rm -fr .tox/ + rm -f .coverage + rm -fr htmlcov/ + rm -fr .pytest_cache + +lint: ## check style with flake8 + flake8 graphql tests + +test: ## run tests quickly with the default Python + py.test + +test-all: ## run tests on every Python version with tox + tox + +coverage: ## check code coverage quickly with the default Python + coverage run --source graphql -m pytest + coverage report -m + coverage html + $(BROWSER) htmlcov/index.html + +docs: ## generate Sphinx HTML documentation, including API docs + rm -f docs/graphql.rst + rm -f docs/modules.rst + sphinx-apidoc -o docs/ graphql + $(MAKE) -C docs clean + $(MAKE) -C docs html + $(BROWSER) docs/_build/html/index.html + +servedocs: docs ## compile the docs watching for changes + watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . + +release: dist ## package and upload a release + twine upload dist/* + +dist: clean ## builds source and wheel package + python setup.py sdist + python setup.py bdist_wheel + ls -l dist + +install: clean ## install the package to the active Python's site-packages + python setup.py install diff --git a/Pipfile b/Pipfile new file mode 100644 index 00000000..2e713c48 --- /dev/null +++ b/Pipfile @@ -0,0 +1,17 @@ +[[source]] +url = "https://pypi.python.org/simple" +verify_ssl = true +name = "pypi" + +[dev-packages] +graphql-core-next = {path = ".", editable = true} +flake8 = "*" +mypy = "*" +pytest = "*" +pytest-describe = "*" +pytest-asyncio = "*" +tox = "*" +sphinx = "*" +sphinx_rtd_theme = "*" +python-coveralls = "*" +pytest-cov = "*" diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 00000000..124101f0 --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,367 @@ +{ + "_meta": { + "hash": { + "sha256": "3b26026ae00af20c39e7653879237c85fb6b48443f367bb5b236d94ac25971fe" + }, + "pipfile-spec": 6, + "requires": {}, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.python.org/simple", + "verify_ssl": true + } + ] + }, + "default": {}, + "develop": { + "alabaster": { + "hashes": [ + "sha256:674bb3bab080f598371f4443c5008cbfeb1a5e622dd312395d2d82af2c54c456", + "sha256:b63b1f4dc77c074d386752ec4a8a7517600f6c0db8cd42980cae17ab7b3275d7" + ], + "version": "==0.7.11" + }, + "atomicwrites": { + "hashes": [ + "sha256:240831ea22da9ab882b551b31d4225591e5e447a68c5e188db5b89ca1d487585", + "sha256:a24da68318b08ac9c9c45029f4a10371ab5b20e4226738e150e6e7c571630ae6" + ], + "version": "==1.1.5" + }, + "attrs": { + "hashes": [ + "sha256:4b90b09eeeb9b88c35bc642cbac057e45a5fd85367b985bd2809c62b7b939265", + "sha256:e0d0eb91441a3b53dab4d9b743eafc1ac44476296a2053b6ca3af0b139faf87b" + ], + "version": "==18.1.0" + }, + "babel": { + "hashes": [ + "sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669", + "sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23" + ], + "version": "==2.6.0" + }, + "certifi": { + "hashes": [ + "sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7", + "sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0" + ], + "version": "==2018.4.16" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "colorama": { + "hashes": [ + "sha256:463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda", + 
"sha256:48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1" + ], + "markers": "sys_platform == 'win32'", + "version": "==0.3.9" + }, + "coverage": { + "hashes": [ + "sha256:00d464797a236f654337181af72b4baea3d35d056ca480e45e9163bb5df496b8", + "sha256:0a90afa6f5ea08889da9066dca3ce2ef85d47587e3f66ca06a4fa8d3a0053acc", + "sha256:50727512afe77e044c7d7f2fd4cd0fe62b06527f965b335a810d956748e0514d", + "sha256:6c2fd127cd4e2decb0ab41fe3ac2948b87ad2ea0470e24b4be5f7e7fdfef8df3", + "sha256:6ed521ed3800d8f8911642b9b3c3891780a929db5e572c88c4713c1032530f82", + "sha256:76a73a48a308fb87a4417d630b0345d36166f489ef17ea5aa8e4596fb50a2296", + "sha256:85b1275b6d7a61ccc8024a4e9a4c9e896394776edce1a5d075ec116f91925462", + "sha256:8e60e720cad3ee6b0a32f475ae4040552c5623870a9ca0d3d4263faa89a8d96b", + "sha256:93c50475f189cd226e9688b9897a0cd3c4c5d9c90b1733fa8f6445cfc0182c51", + "sha256:94c1e66610807a7917d967ed6415b9d5fde7487ab2a07bb5e054567865ef6ef0", + "sha256:964f86394cb4d0fd2bb40ffcddca321acf4323b48d1aa5a93db8b743c8a00f79", + "sha256:99043494b28d6460035dd9410269cdb437ee460edc7f96f07ab45c57ba95e651", + "sha256:af2f59ce312523c384a7826821cae0b95f320fee1751387abba4f00eed737166", + "sha256:beb96d32ce8cfa47ec6433d95a33e4afaa97c19ac1b4a47ea40a424fedfee7c2", + "sha256:c00bac0f6b35b82ace069a6a0d88e8fd4cd18d964fc5e47329cd02b212397fbe", + "sha256:d079e36baceea9707fd50b268305654151011274494a33c608c075808920eda8", + "sha256:e813cba9ff0e3d37ad31dc127fac85d23f9a26d0461ef8042ac4539b2045e781" + ], + "version": "==4.0.3" + }, + "docutils": { + "hashes": [ + "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", + "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", + "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6" + ], + "version": "==0.14" + }, + "flake8": { + "hashes": [ + "sha256:7253265f7abd8b313e3892944044a365e3f4ac3fcdcfb4298f55ee9ddf188ba0", + "sha256:c7841163e2b576d435799169b78703ad6ac1bbb0f199994fc05f700b2a90ea37" + ], + "index": "pypi", + "version": "==3.5.0" + }, + "graphql-core-next": { + "editable": true, + "path": "." 
+ }, + "idna": { + "hashes": [ + "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", + "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" + ], + "version": "==2.7" + }, + "imagesize": { + "hashes": [ + "sha256:3620cc0cadba3f7475f9940d22431fc4d407269f1be59ec9b8edcca26440cf18", + "sha256:5b326e4678b6925158ccc66a9fa3122b6106d7c876ee32d7de6ce59385b96315" + ], + "version": "==1.0.0" + }, + "jinja2": { + "hashes": [ + "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", + "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" + ], + "version": "==2.10" + }, + "markupsafe": { + "hashes": [ + "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" + ], + "version": "==1.0" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "more-itertools": { + "hashes": [ + "sha256:c187a73da93e7a8acc0001572aebc7e3c69daf7bf6881a2cea10650bd4420092", + "sha256:c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e", + "sha256:fcbfeaea0be121980e15bc97b3817b5202ca73d0eae185b4550cbfce2a3ebb3d" + ], + "version": "==4.3.0" + }, + "mypy": { + "hashes": [ + "sha256:673ea75fb750289b7d1da1331c125dc62fc1c3a8db9129bb372ae7b7d5bf300a", + "sha256:c770605a579fdd4a014e9f0a34b6c7a36ce69b08100ff728e96e27445cef3b3c" + ], + "index": "pypi", + "version": "==0.620" + }, + "packaging": { + "hashes": [ + "sha256:e9215d2d2535d3ae866c3d6efc77d5b24a0192cce0ff20e42896cc0664f889c0", + "sha256:f019b770dd64e585a99714f1fd5e01c7a8f11b45635aa953fd41c689a657375b" + ], + "version": "==17.1" + }, + "pluggy": { + "hashes": [ + "sha256:6e3836e39f4d36ae72840833db137f7b7d35105079aee6ec4a62d9f80d594dd1", + "sha256:95eb8364a4708392bae89035f45341871286a333f749c3141c20573d2b3876e1" + ], + "markers": "python_version != '3.3.*' and python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.2.*' and python_version != '3.1.*'", + "version": "==0.7.1" + }, + "py": { + "hashes": [ + "sha256:3fd59af7435864e1a243790d322d763925431213b6b8529c6ca71081ace3bbf7", + "sha256:e31fb2767eb657cbde86c454f02e99cb846d3cd9d61b318525140214fdc0e98e" + ], + "markers": "python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.0.*' and python_version != '3.3.*' and python_version != '3.2.*'", + "version": "==1.5.4" + }, + "pycodestyle": { + "hashes": [ + "sha256:682256a5b318149ca0d2a9185d365d8864a768a28db66a84a2ea946bcc426766", + "sha256:6c4245ade1edfad79c3446fadfc96b0de2759662dc29d07d80a6f27ad1ca6ba9" + ], + "version": "==2.3.1" + }, + "pyflakes": { + "hashes": [ + "sha256:08bd6a50edf8cffa9fa09a463063c425ecaaf10d1eb0335a7e8b1401aef89e6f", + "sha256:8d616a382f243dbf19b54743f280b80198be0bca3a5396f1d2e1fca6223e8805" + ], + "version": "==1.6.0" + }, + "pygments": { + "hashes": [ + "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d", + "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc" + ], + "version": "==2.2.0" + }, + "pyparsing": { + "hashes": [ + "sha256:0832bcf47acd283788593e7a0f542407bd9550a55a8a8435214a1960e04bcb04", + "sha256:fee43f17a9c4087e7ed1605bd6df994c6173c1e977d7ade7b651292fab2bd010" + ], + "version": "==2.2.0" + }, + "pytest": { + "hashes": [ + "sha256:8214ab8446104a1d0c17fbd218ec6aac743236c6ffbe23abc038e40213c60b88", + "sha256:e2b2c6e1560b8f9dc8dd600b0923183fbd68ba3d9bdecde04467be6dd296a384" + ], + "index": 
"pypi", + "version": "==3.7.0" + }, + "pytest-asyncio": { + "hashes": [ + "sha256:a962e8e1b6ec28648c8fe214edab4e16bacdb37b52df26eb9d63050af309b2a9", + "sha256:fbd92c067c16111174a1286bfb253660f1e564e5146b39eeed1133315cf2c2cf" + ], + "index": "pypi", + "version": "==0.9.0" + }, + "pytest-cov": { + "hashes": [ + "sha256:03aa752cf11db41d281ea1d807d954c4eda35cfa1b21d6971966cc041bbf6e2d", + "sha256:890fe5565400902b0c78b5357004aab1c814115894f4f21370e2433256a3eeec" + ], + "index": "pypi", + "version": "==2.5.1" + }, + "pytest-describe": { + "hashes": [ + "sha256:bd6be131452b7822c872735ffe53ce3931b3b80cbbad1647c2b482cc9ef3d00e" + ], + "index": "pypi", + "version": "==0.11.1" + }, + "python-coveralls": { + "hashes": [ + "sha256:1748272081e0fc21e2c20c12e5bd18cb13272db1b130758df0d473da0cb31087", + "sha256:736dda01f64beda240e1500d5f264b969495b05fcb325c7c0eb7ebbfd1210b70" + ], + "index": "pypi", + "version": "==2.9.1" + }, + "pytz": { + "hashes": [ + "sha256:a061aa0a9e06881eb8b3b2b43f05b9439d6583c206d0a6c340ff72a7b6669053", + "sha256:ffb9ef1de172603304d9d2819af6f5ece76f2e85ec10692a524dd876e72bf277" + ], + "version": "==2018.5" + }, + "pyyaml": { + "hashes": [ + "sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b", + "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf", + "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a", + "sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3", + "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1", + "sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1", + "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613", + "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04", + "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f", + "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537", + "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531" + ], + "version": "==3.13" + }, + "requests": { + "hashes": [ + "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1", + "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a" + ], + "version": "==2.19.1" + }, + "six": { + "hashes": [ + "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", + "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + ], + "version": "==1.11.0" + }, + "snowballstemmer": { + "hashes": [ + "sha256:919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128", + "sha256:9f3bcd3c401c3e862ec0ebe6d2c069ebc012ce142cce209c098ccb5b09136e89" + ], + "version": "==1.2.1" + }, + "sphinx": { + "hashes": [ + "sha256:217ad9ece2156ed9f8af12b5d2c82a499ddf2c70a33c5f81864a08d8c67b9efc", + "sha256:a765c6db1e5b62aae857697cd4402a5c1a315a7b0854bbcd0fc8cdc524da5896" + ], + "index": "pypi", + "version": "==1.7.6" + }, + "sphinx-rtd-theme": { + "hashes": [ + "sha256:3b49758a64f8a1ebd8a33cb6cc9093c3935a908b716edfaa5772fd86aac27ef6", + "sha256:80e01ec0eb711abacb1fa507f3eae8b805ae8fa3e8b057abfdf497e3f644c82c" + ], + "version": "==0.4.1" + }, + "sphinxcontrib-websupport": { + "hashes": [ + "sha256:68ca7ff70785cbe1e7bccc71a48b5b6d965d79ca50629606c7861a21b206d9dd", + "sha256:9de47f375baf1ea07cdb3436ff39d7a9c76042c10a769c52353ec46e4e8fc3b9" + ], + "version": "==1.1.0" + }, + "tox": { + "hashes": [ + "sha256:4df108a1fcc93a7ee4ac97e1a3a1fc3d41ddd22445d518976604e2ef05025280", + 
"sha256:9f0cbcc36e08c2c4ae90d02d3d1f9a62231f974bcbc1df85e8045946d8261059" + ], + "index": "pypi", + "version": "==3.1.2" + }, + "typed-ast": { + "hashes": [ + "sha256:0948004fa228ae071054f5208840a1e88747a357ec1101c17217bfe99b299d58", + "sha256:10703d3cec8dcd9eef5a630a04056bbc898abc19bac5691612acba7d1325b66d", + "sha256:1f6c4bd0bdc0f14246fd41262df7dfc018d65bb05f6e16390b7ea26ca454a291", + "sha256:25d8feefe27eb0303b73545416b13d108c6067b846b543738a25ff304824ed9a", + "sha256:29464a177d56e4e055b5f7b629935af7f49c196be47528cc94e0a7bf83fbc2b9", + "sha256:2e214b72168ea0275efd6c884b114ab42e316de3ffa125b267e732ed2abda892", + "sha256:3e0d5e48e3a23e9a4d1a9f698e32a542a4a288c871d33ed8df1b092a40f3a0f9", + "sha256:519425deca5c2b2bdac49f77b2c5625781abbaf9a809d727d3a5596b30bb4ded", + "sha256:57fe287f0cdd9ceaf69e7b71a2e94a24b5d268b35df251a88fef5cc241bf73aa", + "sha256:668d0cec391d9aed1c6a388b0d5b97cd22e6073eaa5fbaa6d2946603b4871efe", + "sha256:68ba70684990f59497680ff90d18e756a47bf4863c604098f10de9716b2c0bdd", + "sha256:6de012d2b166fe7a4cdf505eee3aaa12192f7ba365beeefaca4ec10e31241a85", + "sha256:79b91ebe5a28d349b6d0d323023350133e927b4de5b651a8aa2db69c761420c6", + "sha256:8550177fa5d4c1f09b5e5f524411c44633c80ec69b24e0e98906dd761941ca46", + "sha256:898f818399cafcdb93cbbe15fc83a33d05f18e29fb498ddc09b0214cdfc7cd51", + "sha256:94b091dc0f19291adcb279a108f5d38de2430411068b219f41b343c03b28fb1f", + "sha256:a26863198902cda15ab4503991e8cf1ca874219e0118cbf07c126bce7c4db129", + "sha256:a8034021801bc0440f2e027c354b4eafd95891b573e12ff0418dec385c76785c", + "sha256:bc978ac17468fe868ee589c795d06777f75496b1ed576d308002c8a5756fb9ea", + "sha256:c05b41bc1deade9f90ddc5d988fe506208019ebba9f2578c622516fd201f5863", + "sha256:c9b060bd1e5a26ab6e8267fd46fc9e02b54eb15fffb16d112d4c7b1c12987559", + "sha256:edb04bdd45bfd76c8292c4d9654568efaedf76fe78eb246dde69bdb13b2dad87", + "sha256:f19f2a4f547505fe9072e15f6f4ae714af51b5a681a97f187971f50c283193b6" + ], + "version": "==1.1.0" + }, + "urllib3": { + "hashes": [ + "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", + "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5" + ], + "version": "==1.23" + }, + "virtualenv": { + "hashes": [ + "sha256:2ce32cd126117ce2c539f0134eb89de91a8413a29baac49cbab3eb50e2026669", + "sha256:ca07b4c0b54e14a91af9f34d0919790b016923d157afda5efdde55c96718f752" + ], + "version": "==16.0.0" + } + } +} diff --git a/README.md b/README.md new file mode 100644 index 00000000..0953cc40 --- /dev/null +++ b/README.md @@ -0,0 +1,216 @@ +# GraphQL-core-next + +GraphQL-core-next is a Python port of [GraphQL.js](https://github.com/graphql/graphql-js), +the JavaScript reference implementation for [GraphQL](https://graphql.org/), +a query language for APIs created by Facebook. 
+
+[![PyPI version](https://badge.fury.io/py/GraphQL-core-next.svg)](https://badge.fury.io/py/GraphQL-core-next)
+[![Documentation Status](https://readthedocs.org/projects/graphql-core-next/badge/)](https://graphql-core-next.readthedocs.io)
+[![Build Status](https://api.travis-ci.com/graphql-python/GraphQL-core-next.svg?branch=master)](https://travis-ci.com/graphql-python/GraphQL-core-next/)
+[![Coverage Status](https://coveralls.io/repos/github/graphql-python/GraphQL-core-next/badge.svg?branch=master)](https://coveralls.io/github/graphql-python/GraphQL-core-next?branch=master)
+[![Dependency Updates](https://pyup.io/repos/github/graphql-python/GraphQL-core-next/shield.svg)](https://pyup.io/repos/github/graphql-python/GraphQL-core-next/)
+[![Python 3 Status](https://pyup.io/repos/github/graphql-python/GraphQL-core-next/python-3-shield.svg)](https://pyup.io/repos/github/graphql-python/GraphQL-core-next/)
+
+The current version 1.0.0rc1 of GraphQL-core-next is up-to-date with GraphQL.js
+version 14.0.0rc2. All parts of the API are covered by an extensive test
+suite of currently 1529 unit tests.
+
+
+## Documentation
+
+More detailed documentation for GraphQL-core-next can be found at
+[graphql-core-next.readthedocs.io](https://graphql-core-next.readthedocs.io/).
+
+There will also be [blog articles](https://cito.github.io/tags/graphql/)
+with more usage examples.
+
+
+## Getting started
+
+An overview of GraphQL in general is available in the
+[README](https://github.com/facebook/graphql/blob/master/README.md) for the
+[Specification for GraphQL](https://github.com/facebook/graphql). That overview
+describes a simple set of GraphQL examples that exist as [tests](tests)
+in this repository. A good way to get started with this repository is to walk
+through that README and the corresponding tests in parallel.
+
+
+## Installation
+
+GraphQL-core-next can be installed from PyPI using the built-in pip command:
+
+    python -m pip install graphql-core-next
+
+Alternatively, you can also use [pipenv](https://docs.pipenv.org/) for
+installation in a virtual environment:
+
+    pipenv install graphql-core-next
+
+
+## Usage
+
+GraphQL-core-next provides two important capabilities: building a type schema,
+and serving queries against that type schema.
+
+First, build a GraphQL type schema which maps to your code base:
+
+```python
+from graphql import (
+    GraphQLSchema, GraphQLObjectType, GraphQLField, GraphQLString)
+
+schema = GraphQLSchema(
+    query=GraphQLObjectType(
+        name='RootQueryType',
+        fields={
+            'hello': GraphQLField(
+                GraphQLString,
+                resolve=lambda obj, info: 'world')
+        }))
+```
+
+This defines a simple schema with one type and one field that resolves
+to a fixed value. The `resolve` function can return a value, a coroutine
+object or a list of these. It takes two positional arguments; the first one
+provides the root or the resolved parent field, the second one provides a
+`GraphQLResolveInfo` object which contains information about the execution
+state of the query, including a `context` attribute holding per-request state
+such as authentication information or a database session. Any GraphQL
+arguments are passed to the `resolve` functions as individual keyword
+arguments.
+
+Note that the signature of the resolver functions is a bit different in
+GraphQL.js, where the context is passed separately and arguments are passed
+as a single object. Also note that GraphQL fields must be passed explicitly
+as `GraphQLField` objects, and GraphQL arguments as `GraphQLArgument`
+objects.
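+
+As an illustration only (the `greeting` field and its `name` argument below
+are made-up names, not part of the library), a field that takes an argument
+could be sketched like this:
+
+```python
+from graphql import (
+    GraphQLSchema, GraphQLObjectType, GraphQLField, GraphQLArgument,
+    GraphQLString)
+
+schema = GraphQLSchema(
+    query=GraphQLObjectType(
+        name='RootQueryType',
+        fields={
+            # 'greeting' and 'name' are example names, not library API
+            'greeting': GraphQLField(
+                GraphQLString,
+                args={'name': GraphQLArgument(GraphQLString)},
+                # the argument arrives as a keyword argument of the resolver
+                resolve=lambda obj, info, name='world': f'Hello, {name}!')
+        }))
+```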
+
+A more complex example is included in the top level [tests](tests) directory.
+
+Then, serve the result of a query against that type schema.
+
+```python
+from graphql import graphql_sync
+
+query = '{ hello }'
+
+print(graphql_sync(schema, query))
+```
+
+This runs a query fetching the one field defined, and then prints the result:
+
+```python
+ExecutionResult(data={'hello': 'world'}, errors=None)
+```
+
+The `graphql_sync` function will first ensure the query is syntactically
+and semantically valid before executing it, reporting errors otherwise.
+
+```python
+from graphql import graphql_sync
+
+query = '{ boyhowdy }'
+
+print(graphql_sync(schema, query))
+```
+
+Because we queried a non-existing field, we will get the following result:
+
+```python
+ExecutionResult(data=None, errors=[GraphQLError(
+    "Cannot query field 'boyhowdy' on type 'RootQueryType'.",
+    locations=[SourceLocation(line=1, column=3)])])
+```
+
+The `graphql_sync` function assumes that all resolvers return values
+synchronously. By using coroutines as resolvers, you can also create
+results in an asynchronous fashion with the `graphql` function.
+
+```python
+import asyncio
+from graphql import (
+    graphql, GraphQLSchema, GraphQLObjectType, GraphQLField, GraphQLString)
+
+
+async def resolve_hello(obj, info):
+    await asyncio.sleep(3)
+    return 'world'
+
+schema = GraphQLSchema(
+    query=GraphQLObjectType(
+        name='RootQueryType',
+        fields={
+            'hello': GraphQLField(
+                GraphQLString,
+                resolve=resolve_hello)
+        }))
+
+
+async def main():
+    query = '{ hello }'
+    print('Fetching the result...')
+    result = await graphql(schema, query)
+    print(result)
+
+
+loop = asyncio.get_event_loop()
+try:
+    loop.run_until_complete(main())
+finally:
+    loop.close()
+```
+
+
+## Goals and restrictions
+
+GraphQL-core-next tries to reproduce the code of the reference implementation
+GraphQL.js in Python as closely as possible and to stay up-to-date with
+the latest development of GraphQL.js.
+
+It has been created as an alternative to
+[GraphQL-core](https://github.com/graphql-python/graphql-core),
+a prior work by Syrus Akbary, which was based on an older version of
+GraphQL.js and targeted older Python versions. Some parts of the code base
+of GraphQL-core-next have been inspired by GraphQL-core or directly taken over
+with only slight modifications, but most of the code base has been
+re-implemented from scratch, replicating the latest code in GraphQL.js and
+adding type hints. Recently, GraphQL-core has also been modernized, but its
+focus is primarily to serve as a solid base library for
+[Graphene](http://graphene-python.org/), a higher-level framework for
+building GraphQL APIs in Python.
+
+Design goals for the GraphQL-core-next library are:
+
+* to be a simple, cruft-free, state-of-the-art implementation of GraphQL using
+  current library and language versions
+* to be very close to the GraphQL.js reference implementation, while still
+  using a Pythonic API and code style
+* to make use of Python type hints, similar to how GraphQL.js makes use of Flow
+* to replicate the complete Mocha-based test suite of GraphQL.js using
+  [pytest](https://docs.pytest.org/)
+
+Some restrictions (mostly in line with the design goals):
+
+* requires Python >= 3.6
+* does not support a few deprecated methods and options of GraphQL.js
+* supports asynchronous operations only via asyncio
+* does not support additional executors and middleware (unlike GraphQL-core)
+* the benchmarks have not yet been ported to Python
+
+
+## Changelog
+
+Changes are tracked as
+[GitHub releases](https://github.com/graphql-python/graphql-core-next/releases).
+
+
+## Credits
+
+The GraphQL-core-next library
+* has been created and is maintained by Christoph Zwerschke
+* uses ideas and code from GraphQL-core, a prior work by Syrus Akbary
+* is a Python port of GraphQL.js, which has been created and is maintained
+  by Facebook, Inc.
+
+
+## License
+
+GraphQL-core-next is
+[MIT-licensed](https://github.com/graphql-python/graphql-core-next/blob/master/LICENSE).
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 00000000..f848e9f1
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,225 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+ +.PHONY: help +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " epub3 to make an epub3" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + @echo " dummy to check syntax errors of document sources" + +.PHONY: clean +clean: + rm -rf $(BUILDDIR)/* + +.PHONY: html +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +.PHONY: dirhtml +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +.PHONY: singlehtml +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +.PHONY: pickle +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +.PHONY: json +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +.PHONY: htmlhelp +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +.PHONY: qthelp +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/GraphQL-core-next.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/GraphQL-core-next.qhc" + +.PHONY: applehelp +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +.PHONY: devhelp +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/GraphQL-core-next" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/GraphQL-core-next" + @echo "# devhelp" + +.PHONY: epub +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +.PHONY: epub3 +epub3: + $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 + @echo + @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." + +.PHONY: latex +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +.PHONY: latexpdf +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: latexpdfja +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: text +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +.PHONY: man +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +.PHONY: texinfo +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +.PHONY: info +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +.PHONY: gettext +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +.PHONY: changes +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +.PHONY: linkcheck +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +.PHONY: doctest +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +.PHONY: coverage +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +.PHONY: xml +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +.PHONY: pseudoxml +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
+ +.PHONY: dummy +dummy: + $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy + @echo + @echo "Build finished. Dummy builder generates no files." diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..70c92c0e --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +# +# GraphQL-core-next documentation build configuration file, created by +# sphinx-quickstart on Thu Jun 21 16:28:30 2018. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +# +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'GraphQL-core-next' +copyright = u'2018, Christoph Zwerschke' +author = u'Christoph Zwerschke' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = u'0.9' +# The full version, including alpha/beta/rc tags. +release = u'0.9.0' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# +# today = '' +# +# Else, today_fmt is used as the format for a strftime call. +# +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. 
+# +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. +# " v documentation" by default. +# +# html_title = u'GraphQL-core-next v1.0.0' + +# A shorter title for the navigation bar. Default is the same as html_title. +# +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# +# html_logo = None + +# The name of an image file (relative to this directory) to use as a favicon of +# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# +# html_extra_path = [] + +# If not None, a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# The empty string is equivalent to '%b %d, %Y'. +# +# html_last_updated_fmt = None + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# +# html_additional_pages = {} + +# If false, no module index is generated. +# +# html_domain_indices = True + +# If false, no index is generated. +# +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# +html_show_sourcelink = False + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' +# +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# 'ja' uses this config value. +# 'zh' user can custom change `jieba` dictionary path. +# +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'GraphQL-core-next-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'GraphQL-core-next.tex', u'GraphQL-core-next Documentation', + u'Christoph Zwerschke', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# +# latex_use_parts = False + +# If true, show page references after internal links. +# +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# +# latex_appendices = [] + +# If false, no module index is generated. +# +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'graphql-core-next', u'GraphQL-core-next Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +# +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'GraphQL-core-next', u'GraphQL-core-next Documentation', + author, 'GraphQL-core-next', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +# +# texinfo_appendices = [] + +# If false, no module index is generated. +# +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# +# texinfo_no_detailmenu = False diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..b24ec349 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,23 @@ +Welcome to GraphQL-core-next +============================ + +Contents +-------- + +.. 
toctree:: + :maxdepth: 2 + + intro + + usage/index + + modules/graphql + + +Indices and tables +------------------ + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/docs/intro.rst b/docs/intro.rst new file mode 100644 index 00000000..59ed1673 --- /dev/null +++ b/docs/intro.rst @@ -0,0 +1,95 @@ +Introduction +============ + +`GraphQL-core-next`_ is a Python port of `GraphQL.js`_, +the JavaScript reference implementation for GraphQL_, +a query language for APIs created by Facebook. + +`GraphQL`_ consists of three parts: + +* A type system that you define +* A query language that you use to query the API +* An execution and validation engine + +The reference implementation closely follows the `Specification for GraphQL`_ +which consists of the following sections: + +* Language_ +* `Type System`_ +* Introspection_ +* Validation_ +* Execution_ +* Response_ + +This division into subsections is reflected in the :ref:`sub-packages` of +GraphQL-core-next. Each of these sub-packages implements the aspects specified in +one of the sections of the specification. + + +Getting started +--------------- + +You can install GraphQL-core-next using pip_:: + + pip install graphql-core-next + +You can also install GraphQL-core-next with pipenv_, if you prefer that:: + + pipenv install graphql-core-next + +Now you can start using GraphQL-core-next by importing from the top-level +:mod:`graphql` package. Nearly everything defined in the sub-packages +can also be imported directly from the top-level package. + +For instance, using the types defined in the :mod:`graphql.type` package, +you can define a GraphQL schema, like this simple one:: + + from graphql import ( + GraphQLSchema, GraphQLObjectType, GraphQLField, GraphQLString) + + schema = GraphQLSchema( + query=GraphQLObjectType( + name='RootQueryType', + fields={ + 'hello': GraphQLField( + GraphQLString, + resolve=lambda obj, info: 'world') + })) + +The :mod:`graphql.execution` package implements the mechanism for executing +GraphQL queries. The top-level :func:`graphql` and :func:`graphql_sync` +functions also parse and validate queries using the :mod:`graphql.language` +and :mod:`graphql.validation` modules. + +So to validate and execute a query against our simple schema, you can do:: + + from graphql import graphql_sync + + query = '{ hello }' + + print(graphql_sync(schema, query)) + +This will yield the following output:: + + ExecutionResult(data={'hello': 'world'}, errors=None) + + +Reporting Issues and Contributing +--------------------------------- + +Please visit the `GitHub repository of GraphQL-core-next`_ if you're interested +in the current development or want to report issues or send pull requests. + +.. _GraphQL: https://graphql.org/ +.. _GraphQl.js: https://github.com/graphql/graphql-js +.. _GraphQl-core-next: https://github.com/graphql-python/graphql-core-next +.. _GitHub repository of GraphQL-core-next: https://github.com/graphql-python/graphql-core-next +.. _Specification for GraphQL: https://facebook.github.io/graphql/ +.. _Language: http://facebook.github.io/graphql/draft/#sec-Language +.. _Type System: http://facebook.github.io/graphql/draft/#sec-Type-System +.. _Introspection: http://facebook.github.io/graphql/draft/#sec-Introspection +.. _Validation: http://facebook.github.io/graphql/draft/#sec-Validation +.. _Execution: http://facebook.github.io/graphql/draft/#sec-Execution +.. _Response: http://facebook.github.io/graphql/draft/#sec-Response +.. _pip: https://pip.pypa.io/ +.. 
_pipenv: https://github.com/pypa/pipenv diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..7428d301 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,281 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. epub3 to make an epub3 + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + echo. coverage to run coverage check of the documentation if enabled + echo. dummy to check syntax errors of document sources + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +REM Check if sphinx-build is available and fallback to Python version if any +%SPHINXBUILD% 1>NUL 2>NUL +if errorlevel 9009 goto sphinx_python +goto sphinx_ok + +:sphinx_python + +set SPHINXBUILD=python -m sphinx.__init__ +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +:sphinx_ok + + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\GraphQL-core-next.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\GraphQL-core-next.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "epub3" ( + %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "coverage" ( + %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage + if errorlevel 1 exit /b 1 + echo. 
+ echo.Testing of coverage in the sources finished, look at the ^ +results in %BUILDDIR%/coverage/python.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +if "%1" == "dummy" ( + %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. Dummy builder generates no files. + goto end +) + +:end diff --git a/docs/modules/error.rst b/docs/modules/error.rst new file mode 100644 index 00000000..c2e96264 --- /dev/null +++ b/docs/modules/error.rst @@ -0,0 +1,15 @@ +Error +===== + +.. automodule:: graphql.error + +.. autoexception:: GraphQLError +.. autoexception:: GraphQLSyntaxError + +.. autofunction:: located_error +.. autofunction:: print_error +.. autofunction:: format_error + +The :mod:`graphql.error` module also contains the :const:`INVALID` singleton +that is used to describe invalid or undefined values and corresponds to the +``undefined`` value in GraphQL.js. diff --git a/docs/modules/execution.rst b/docs/modules/execution.rst new file mode 100644 index 00000000..50d72898 --- /dev/null +++ b/docs/modules/execution.rst @@ -0,0 +1,16 @@ +Execution +========= + +.. py:module:: graphql.execution + +.. automodule:: graphql.execution + +.. autofunction:: execute +.. autofunction:: default_field_resolver +.. autofunction:: response_path_as_list + +.. autoclass:: ExecutionContext +.. autoclass:: ExecutionResult + +.. autofunction:: get_directive_values + diff --git a/docs/modules/graphql.rst b/docs/modules/graphql.rst new file mode 100644 index 00000000..8a330de1 --- /dev/null +++ b/docs/modules/graphql.rst @@ -0,0 +1,29 @@ +Reference +========= + +.. automodule:: graphql + +.. _top-level-functions: + +Top-Level Functions +------------------- + +.. autofunction:: graphql +.. autofunction:: graphql_sync + +.. _sub-packages: + +Sub-Packages +------------ + +.. toctree:: + :maxdepth: 1 + + error + execution + language + pyutils + subscription + type + utilities + validation diff --git a/docs/modules/language.rst b/docs/modules/language.rst new file mode 100644 index 00000000..eaf36c75 --- /dev/null +++ b/docs/modules/language.rst @@ -0,0 +1,104 @@ +Language +======== + +.. automodule:: graphql.language + +AST +--- + +.. autoclass:: Location +.. autoclass:: Node + +Each kind of AST node has its own class: + +.. autoclass:: ArgumentNode +.. autoclass:: BooleanValueNode +.. autoclass:: DefinitionNode +.. autoclass:: DirectiveDefinitionNode +.. autoclass:: DirectiveNode +.. autoclass:: DocumentNode +.. autoclass:: EnumTypeDefinitionNode +.. autoclass:: EnumTypeExtensionNode +.. autoclass:: EnumValueDefinitionNode +.. autoclass:: EnumValueNode +.. autoclass:: ExecutableDefinitionNode +.. autoclass:: FieldDefinitionNode +.. autoclass:: FieldNode +.. autoclass:: FloatValueNode +.. autoclass:: FragmentDefinitionNode +.. autoclass:: FragmentSpreadNode +.. autoclass:: InlineFragmentNode +.. autoclass:: InputObjectTypeDefinitionNode +.. autoclass:: InputObjectTypeExtensionNode +.. autoclass:: InputValueDefinitionNode +.. autoclass:: IntValueNode +.. autoclass:: InterfaceTypeDefinitionNode +.. autoclass:: InterfaceTypeExtensionNode +.. autoclass:: ListTypeNode +.. 
autoclass:: ListValueNode +.. autoclass:: NameNode +.. autoclass:: NamedTypeNode +.. autoclass:: NonNullTypeNode +.. autoclass:: NullValueNode +.. autoclass:: ObjectFieldNode +.. autoclass:: ObjectTypeDefinitionNode +.. autoclass:: ObjectTypeExtensionNode +.. autoclass:: ObjectValueNode +.. autoclass:: OperationDefinitionNode +.. autoclass:: OperationType +.. autoclass:: OperationTypeDefinitionNode +.. autoclass:: ScalarTypeDefinitionNode +.. autoclass:: ScalarTypeExtensionNode +.. autoclass:: SchemaDefinitionNode +.. autoclass:: SchemaExtensionNode +.. autoclass:: SelectionNode +.. autoclass:: SelectionSetNode +.. autoclass:: StringValueNode +.. autoclass:: TypeDefinitionNode +.. autoclass:: TypeExtensionNode +.. autoclass:: TypeNode +.. autoclass:: TypeSystemDefinitionNode +.. autoclass:: TypeSystemExtensionNode +.. autoclass:: UnionTypeDefinitionNode +.. autoclass:: UnionTypeExtensionNode +.. autoclass:: ValueNode +.. autoclass:: VariableDefinitionNode +.. autoclass:: VariableNode + +Lexer +----- + +.. autoclass:: Lexer +.. autoclass:: TokenKind +.. autoclass:: Token + +Location +-------- + +.. autofunction:: get_location +.. autoclass:: SourceLocation + + +Parser +------ + +.. autofunction:: parse +.. autofunction:: parse_type +.. autofunction:: parse_value + +Source +------ + +.. autoclass:: Source + +Visitor +------- + +.. autofunction:: visit +.. autoclass:: Visitor +.. autoclass:: ParallelVisitor +.. autoclass:: TypeInfoVisitor + +The module also exports the constants :const:`BREAK`, :const:`SKIP`, +:const:`REMOVE` and :const:`IDLE` that are used as special return values +in the :class:`Visitor` methods. diff --git a/docs/modules/pyutils.rst b/docs/modules/pyutils.rst new file mode 100644 index 00000000..a4114f9c --- /dev/null +++ b/docs/modules/pyutils.rst @@ -0,0 +1,20 @@ +PyUtils +======= + +.. automodule:: graphql.pyutils + +.. autofunction:: camel_to_snake +.. autofunction:: snake_to_camel +.. autofunction:: cached_property +.. autofunction:: contain_subset +.. autofunction:: dedent +.. autoclass:: EventEmitter +.. autoclass:: EventEmitterAsyncIterator +.. autofunction:: is_finite +.. autofunction:: is_integer +.. autofunction:: is_invalid +.. autofunction:: is_nullish +.. autoclass:: MaybeAwaitable +.. autofunction:: or_list +.. autofunction:: quoted_or_list +.. autofunction:: suggestion_list diff --git a/docs/modules/subscription.rst b/docs/modules/subscription.rst new file mode 100644 index 00000000..8a9cacac --- /dev/null +++ b/docs/modules/subscription.rst @@ -0,0 +1,8 @@ +Subscription +============ + +.. automodule:: graphql.subscription + +.. autofunction:: subscribe +.. autofunction:: create_source_event_stream + diff --git a/docs/modules/type.rst b/docs/modules/type.rst new file mode 100644 index 00000000..d8223350 --- /dev/null +++ b/docs/modules/type.rst @@ -0,0 +1,191 @@ +Type +==== + +.. automodule:: graphql.type + +Definition +---------- + +Predicates +^^^^^^^^^^ + +.. autofunction:: is_composite_type +.. autofunction:: is_enum_type +.. autofunction:: is_input_object_type +.. autofunction:: is_input_type +.. autofunction:: is_interface_type +.. autofunction:: is_leaf_type +.. autofunction:: is_list_type +.. autofunction:: is_named_type +.. autofunction:: is_non_null_type +.. autofunction:: is_nullable_type +.. autofunction:: is_object_type +.. autofunction:: is_output_type +.. autofunction:: is_scalar_type +.. autofunction:: is_type +.. autofunction:: is_union_type +.. autofunction:: is_wrapping_type + +Assertions +^^^^^^^^^^ + +.. 
autofunction:: assert_abstract_type +.. autofunction:: assert_composite_type +.. autofunction:: assert_enum_type +.. autofunction:: assert_input_object_type +.. autofunction:: assert_input_type +.. autofunction:: assert_interface_type +.. autofunction:: assert_leaf_type +.. autofunction:: assert_list_type +.. autofunction:: assert_named_type +.. autofunction:: assert_non_null_type +.. autofunction:: assert_nullable_type +.. autofunction:: assert_object_type +.. autofunction:: assert_output_type +.. autofunction:: assert_scalar_type +.. autofunction:: assert_type +.. autofunction:: assert_union_type +.. autofunction:: assert_wrapping_type + +Un-modifiers +^^^^^^^^^^^^ + +.. autofunction:: get_nullable_type +.. autofunction:: get_named_type + +Definitions +^^^^^^^^^^^ +.. autoclass:: GraphQLEnumType +.. autoclass:: GraphQLInputObjectType +.. autoclass:: GraphQLInterfaceType +.. autoclass:: GraphQLObjectType +.. autoclass:: GraphQLScalarType +.. autoclass:: GraphQLUnionType + +Type Wrappers +^^^^^^^^^^^^^ + +.. autoclass:: GraphQLList +.. autoclass:: GraphQLNonNull + +Types +^^^^^ +.. autoclass:: GraphQLAbstractType +.. autoclass:: GraphQLArgument +.. autoclass:: GraphQLArgumentMap +.. autoclass:: GraphQLCompositeType +.. autoclass:: GraphQLEnumValue +.. autoclass:: GraphQLEnumValueMap +.. autoclass:: GraphQLField +.. autoclass:: GraphQLFieldMap +.. autoclass:: GraphQLInputField +.. autoclass:: GraphQLInputFieldMap +.. autoclass:: GraphQLInputType +.. autoclass:: GraphQLLeafType +.. autoclass:: GraphQLNamedType +.. autoclass:: GraphQLNullableType +.. autoclass:: GraphQLOutputType +.. autoclass:: GraphQLType +.. autoclass:: GraphQLWrappingType + +.. autoclass:: Thunk + +Resolvers +^^^^^^^^^ + +.. autoclass:: GraphQLFieldResolver +.. autoclass:: GraphQLIsTypeOfFn +.. autoclass:: GraphQLResolveInfo +.. autoclass:: GraphQLTypeResolver +.. autoclass:: ResponsePath + +Directives +---------- + +Predicates +^^^^^^^^^^ + +.. autofunction:: is_directive +.. autofunction:: is_specified_directive + +Definitions +^^^^^^^^^^^ + +.. autoclass:: GraphQLDirective +.. autoclass:: GraphQLIncludeDirective +.. autoclass:: GraphQLSkipDirective +.. autoclass:: GraphQLDeprecatedDirective + +The list of all specified directives is available as +:data:`specified_directives`. + +The module also exports the constant :const:`DEFAULT_DEPRECATION_REASON` +that can be used as the default value for :obj:`deprecation_reason`. + +Introspection +------------- + +Predicates +^^^^^^^^^^ + +.. autofunction:: is_introspection_type + + +Definitions +^^^^^^^^^^^ + +.. autoclass:: TypeKind +.. autoclass:: TypeMetaFieldDef +.. autoclass:: TypeNameMetaFieldDef +.. autoclass:: SchemaMetaFieldDef + +The list of all introspection types is available as +:data:`introspection_types`. + +Scalars +------- + +Predicates +^^^^^^^^^^ + +.. autofunction:: is_specified_scalar_type + +Definitions +^^^^^^^^^^^ + +.. autoclass:: GraphQLBoolean +.. autoclass:: GraphQLFloat +.. autoclass:: GraphQLID +.. autoclass:: GraphQLInt +.. autoclass:: GraphQLString + +The list of all specified directives is available as +:data:`specified_directives`. + +Schema +------ + +Predicates +^^^^^^^^^^ + +.. autofunction:: is_schema + +Definitions +^^^^^^^^^^^ + +.. autoclass:: GraphQLSchema + + +Validate +-------- + +Functions: +^^^^^^^^^^ + +.. autofunction:: validate_schema + + +Assertions +^^^^^^^^^^ + +.. 
autofunction:: assert_valid_schema diff --git a/docs/modules/utilities.rst b/docs/modules/utilities.rst new file mode 100644 index 00000000..6f0809e1 --- /dev/null +++ b/docs/modules/utilities.rst @@ -0,0 +1,102 @@ +Utilities +========= + +.. automodule:: graphql.utilities + +The GraphQL query recommended for a full schema introspection: + +.. autofunction:: get_introspection_query + +Gets the target Operation from a Document: + +.. autofunction:: get_operation_ast + +Gets the Type for the target Operation AST: + +.. autofunction:: get_operation_root_type + +Convert a GraphQLSchema to an IntrospectionQuery: + +.. autofunction:: introspection_from_schema + +Build a GraphQLSchema from an introspection result: + +.. autofunction:: build_client_schema + +Build a GraphQLSchema from GraphQL Schema language: + +.. autofunction:: build_ast_schema +.. autofunction:: build_schema +.. autofunction:: get_description + +Extends an existing GraphQLSchema from a parsed GraphQL Schema language AST: + +.. autofunction:: extend_schema + +Sort a GraphQLSchema: +.. autofunction:: lexicographic_sort_schema + +Print a GraphQLSchema to GraphQL Schema language: + +.. autofunction:: print_introspection_schema +.. autofunction:: print_schema +.. autofunction:: print_type +.. autofunction:: print_value + +Create a GraphQLType from a GraphQL language AST: + +.. autofunction:: type_from_ast + +Create a Python value from a GraphQL language AST with a type: + +.. autofunction:: value_from_ast + +Create a Python value from a GraphQL language AST without a type: + +.. autofunction:: value_from_ast_untyped + +Create a GraphQL language AST from a Python value: + +.. autofunction:: ast_from_value + +A helper to use within recursive-descent visitors which need to be aware of +the GraphQL type system: + +.. autoclass:: TypeInfo + +Coerces a Python value to a GraphQL type, or produces errors: + +.. autofunction:: coerce_value + +Concatenates multiple AST together: + +.. autofunction:: concat_ast + +Separates an AST into an AST per Operation: + +.. autofunction:: separate_operations + +Comparators for types: + +.. autofunction:: is_equal_type +.. autofunction:: is_type_sub_type_of +.. autofunction:: do_types_overlap + +Asserts that a string is a valid GraphQL name: + +.. autofunction:: assert_valid_name +.. autofunction:: is_valid_name_error + +Compares two GraphQLSchemas and detects breaking changes: + +.. autofunction:: find_breaking_changes +.. autofunction:: find_dangerous_changes + +.. autoclass:: BreakingChange +.. autoclass:: BreakingChangeType +.. autoclass:: DangerousChange +.. autoclass:: DangerousChangeType + +Report all deprecated usage within a GraphQL document: + +.. autofunction:: find_deprecated_usages diff --git a/docs/modules/validation.rst b/docs/modules/validation.rst new file mode 100644 index 00000000..e41173ba --- /dev/null +++ b/docs/modules/validation.rst @@ -0,0 +1,122 @@ +Validation +========== + +.. automodule:: graphql.validation + +.. autofunction:: validate + +.. autoclass:: ValidationContext + +Rules +----- + +This list includes all validation rules defined by the GraphQL spec. +The order of the rules in this list has been adjusted to lead to the +most clear output when encountering multiple validation errors: + +.. autodata:: specified_rules + +Spec Section: "Executable Definitions" + +.. autoclass:: ExecutableDefinitionsRule + +Spec Section: "Field Selections on Objects, Interfaces, and Unions Types" + +.. autoclass:: FieldsOnCorrectTypeRule + +Spec Section: "Fragments on Composite Types" + +.. 
autoclass:: FragmentsOnCompositeTypesRule + +Spec Section: "Argument Names" + +.. autoclass:: KnownArgumentNamesRule + +Spec Section: "Directives Are Defined" + +.. autoclass:: KnownDirectivesRule + +Spec Section: "Fragment spread target defined" + +.. autoclass:: KnownFragmentNamesRule + +Spec Section: "Fragment Spread Type Existence" + +.. autoclass:: KnownTypeNamesRule + +Spec Section: "Lone Anonymous Operation" + +.. autoclass:: LoneAnonymousOperationRule + +Spec Section: "Fragments must not form cycles" + +.. autoclass:: NoFragmentCyclesRule + +Spec Section: "All Variable Used Defined" + +.. autoclass:: NoUndefinedVariablesRule + +Spec Section: "Fragments must be used" + +.. autoclass:: NoUnusedFragmentsRule + +Spec Section: "All Variables Used" + +.. autoclass:: NoUnusedVariablesRule + +Spec Section: "Field Selection Merging" + +.. autoclass:: OverlappingFieldsCanBeMergedRule + +Spec Section: "Fragment spread is possible" + +.. autoclass:: PossibleFragmentSpreadsRule + +Spec Section: "Argument Optionality" + +.. autoclass:: ProvidedRequiredArgumentsRule + +Spec Section: "Leaf Field Selections" + +.. autoclass:: ScalarLeafsRule + +Spec Section: "Subscriptions with Single Root Field" + +.. autoclass:: SingleFieldSubscriptionsRule + +Spec Section: "Argument Uniqueness" + +.. autoclass:: UniqueArgumentNamesRule + +Spec Section: "Directives Are Unique Per Location" + +.. autoclass:: UniqueDirectivesPerLocationRule + +Spec Section: "Fragment Name Uniqueness" + +.. autoclass:: UniqueFragmentNamesRule + +Spec Section: "Input Object Field Uniqueness" + +.. autoclass:: UniqueInputFieldNamesRule + +Spec Section: "Operation Name Uniqueness" + +.. autoclass:: UniqueOperationNamesRule + +Spec Section: "Variable Uniqueness" + +.. autoclass:: UniqueVariableNamesRule + +Spec Section: "Value Type Correctness" + +.. autoclass:: ValuesOfCorrectTypeRule + +Spec Section: "Variables are Input Types" + +.. autoclass:: VariablesAreInputTypesRule + +Spec Section: "All Variable Usages Are Allowed" + +.. autoclass:: VariablesInAllowedPositionRule + diff --git a/docs/usage/extension.rst b/docs/usage/extension.rst new file mode 100644 index 00000000..7ba81f1e --- /dev/null +++ b/docs/usage/extension.rst @@ -0,0 +1,47 @@ +Extending a Schema +------------------ + +With GraphQL-core-next you can also extend a given schema using type +extensions. For example, we might want to add a ``lastName`` property to our +``Human`` data type to retrieve only the last name of the person. + +This can be achieved with the :func:`graphql.utilities.extend_schema` +function as follows:: + + from graphql import extend_schema, parse + + schema = extend_schema(schema, parse(""" + extend type Human { + lastName: String + } + """)) + +Note that this function expects the extensions as an AST, which we can +get using the :func:`graphql.language.parse` function. Also note that +the `extend_schema` function does not alter the original schema, but +returns a new schema object. 
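+
+A quick way to check that the extension worked is to print the new schema,
+which should now contain the additional field (a small sketch, assuming the
+``schema`` object from the previous sections)::
+
+    from graphql import print_schema
+
+    # the extended Human type should now list the new field
+    assert 'lastName: String' in print_schema(schema)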
+ +We also need to attach a resolver function to the new field:: + + def get_last_name(human, info): + return human['name'].rsplit(None, 1)[-1] + + schema.get_type('Human').fields['lastName'].resolve = get_last_name + +Now we can query only the last name of a human:: + + result = graphql_sync(schema, """ + { + human(id: "1000") { + lastName + homePlanet + } + } + """) + print(result) + +This query will give the following result:: + + ExecutionResult( + data={'human': {'lastName': 'Skywalker', 'homePlanet': 'Tatooine'}}, + errors=None) diff --git a/docs/usage/index.rst b/docs/usage/index.rst new file mode 100644 index 00000000..8145f7dc --- /dev/null +++ b/docs/usage/index.rst @@ -0,0 +1,18 @@ +Usage +===== + +GraphQL-core-next provides two important capabilities: building a type schema, +and serving queries against that type schema. + +.. toctree:: + :maxdepth: 2 + + schema + resolvers + queries + sdl + introspection + parser + extension + validator + other diff --git a/docs/usage/introspection.rst b/docs/usage/introspection.rst new file mode 100644 index 00000000..0890d17b --- /dev/null +++ b/docs/usage/introspection.rst @@ -0,0 +1,63 @@ +Using an Introspection Query +---------------------------- + +A third way of building a schema is using an introspection query on an +existing server. This is what GraphiQL uses to get information about the +schema on the remote server. You can create an introspection query using +GraphQL-core-next with:: + + from graphql import get_introspection_query + + query = get_introspection_query(descriptions=True) + +This will also yield the descriptions of the introspected schema fields. +You can also create a query that omits the descriptions with:: + + query = get_introspection_query(descriptions=False) + +In practice you would run this query against a remote server, but we can +also run it against the schema we have just built above:: + + introspection_query_result = graphql_sync(schema, query) + +The ``data`` attribute of the introspection query result now gives us a +dictionary, which constitutes a third way of describing a GraphQL schema:: + + {'__schema': { + 'queryType': {'name': 'Query'}, + 'mutationType': None, 'subscriptionType': None, + 'types': [ + {'kind': 'OBJECT', 'name': 'Query', 'description': None, + 'fields': [{ + 'name': 'hero', 'description': None, + 'args': [{'name': 'episode', 'description': ... }], + ... }, ... ], ... }, + ... ], + ... } + } + +This result contains all the information that is available in the SDL +description of the schema, i.e. it does not contain the resolve functions +and information on the server-side values of the enum types. + +You can convert the introspection result into ``GraphQLSchema`` with +GraphQL-core-next by using the :func:`graphql.utilities.build_client_schema` +function:: + + from graphql import build_client_schema + + client_schema = build_client_schema(introspection_query_result.data) + + +It is also possible to convert the result to SDL with GraphQL-core-next by +using the :func:`graphql.utilities.print_schema` function:: + + from graphql import print_schema + + sdl = print_schema(client_schema) + print(sdl) + +This prints the SDL representation of the schema that we started with. + +As you see, it is easy to convert between the three forms of representing +a GraphQL schema in GraphQL-core-next. 
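+
+Putting these pieces together, a complete round trip from a schema to an
+introspection result and back could look like this (a minimal sketch, again
+assuming the ``schema`` object built in the previous sections)::
+
+    from graphql import (
+        build_client_schema, get_introspection_query, graphql_sync,
+        print_schema)
+
+    # fetch the introspection result from the existing schema
+    result = graphql_sync(schema, get_introspection_query(descriptions=True))
+
+    # rebuild a client-side schema from the introspection result
+    client_schema = build_client_schema(result.data)
+
+    # the SDL printed from both schemas should normally be the same
+    print(print_schema(client_schema) == print_schema(schema))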
diff --git a/docs/usage/other.rst b/docs/usage/other.rst
new file mode 100644
index 00000000..1c52b0ca
--- /dev/null
+++ b/docs/usage/other.rst
@@ -0,0 +1,10 @@
+Other Usages
+------------
+
+GraphQL-core-next provides many more low-level functions that can be used to
+work with GraphQL schemas and queries. We encourage you to explore the contents
+of the various :ref:`sub-packages`, particularly :mod:`graphql.utilities`,
+and to look into the source code and tests of `GraphQL-core-next`_ in order
+to find all the functionality that is provided and understand it in detail.
+
+.. _GraphQL-core-next: https://github.com/graphql-python/graphql-core-next
diff --git a/docs/usage/parser.rst b/docs/usage/parser.rst
new file mode 100644
index 00000000..2e6f076e
--- /dev/null
+++ b/docs/usage/parser.rst
@@ -0,0 +1,70 @@
+Parsing GraphQL Queries and Schema Notation
+-------------------------------------------
+
+When executing GraphQL queries, the first step that happens under the hood is
+parsing the query. But GraphQL-core-next also exposes the parser for direct
+usage via the :func:`graphql.language.parse` function. When you pass this
+function GraphQL source code, it will be parsed and returned as a Document,
+i.e. an abstract syntax tree (AST) of :class:`graphql.language.Node` objects.
+The root node will be a :class:`graphql.language.DocumentNode`, with child
+nodes of different kinds corresponding to the GraphQL source. The nodes also
+carry information on the location in the source code that they correspond to.
+
+Here is an example::
+
+    from graphql import parse
+
+    document = parse("""
+        type Query {
+            me: User
+        }
+
+        type User {
+            id: ID
+            name: String
+        }
+        """)
+
+You can also leave out the information on the location in the source code
+when creating the AST document::
+
+    document = parse(..., no_location=True)
+
+This will give the same result as manually creating the AST document::
+
+    document = DocumentNode(definitions=[
+        ObjectTypeDefinitionNode(
+            name=NameNode(value='Query'),
+            fields=[
+                FieldDefinitionNode(
+                    name=NameNode(value='me'),
+                    type=NamedTypeNode(name=NameNode(value='User')),
+                    arguments=[], directives=[])
+            ], directives=[], interfaces=[]),
+        ObjectTypeDefinitionNode(
+            name=NameNode(value='User'),
+            fields=[
+                FieldDefinitionNode(
+                    name=NameNode(value='id'),
+                    type=NamedTypeNode(
+                        name=NameNode(value='ID')),
+                    arguments=[], directives=[]),
+                FieldDefinitionNode(
+                    name=NameNode(value='name'),
+                    type=NamedTypeNode(
+                        name=NameNode(value='String')),
+                    arguments=[], directives=[]),
+            ], directives=[], interfaces=[]),
+        ])
+
+When parsing with ``no_location=False`` (the default), the AST nodes will
+also have a :attr:`loc` attribute carrying the information on the source
+code location corresponding to the AST nodes.
+
+When there is a syntax error in the GraphQL source code, then the
+:func:`parse` function will raise a :exc:`GraphQLSyntaxError`.
+
+The parser can not only be used to parse GraphQL queries, but also to parse
+the GraphQL schema definition language. This will result in another way of
+representing a GraphQL schema, as an AST document.
diff --git a/docs/usage/queries.rst b/docs/usage/queries.rst
new file mode 100644
index 00000000..92bc17f5
--- /dev/null
+++ b/docs/usage/queries.rst
@@ -0,0 +1,130 @@
+Executing Queries
+-----------------
+
+Now that we have defined the schema and breathed life into it with our
+resolver functions, we can execute arbitrary queries against the schema.
+
+The :mod:`graphql` package provides the :func:`graphql.graphql` function
+to execute queries. This is the main feature of GraphQL-core-next.
+
+Note however that this function is actually a coroutine intended to be used
+in asynchronous code running in an event loop.
+
+Here is one way to use it::
+
+    import asyncio
+    from graphql import graphql
+
+    async def query_artoo():
+        result = await graphql(schema, """
+          {
+            droid(id: "2001") {
+              name
+              primaryFunction
+            }
+          }
+          """)
+        print(result)
+
+    asyncio.get_event_loop().run_until_complete(query_artoo())
+
+In our query, we asked for the droid with the id 2001, which is R2-D2, and
+its primary function, Astromech. When everything has been implemented
+correctly as shown above, you should get the expected result::
+
+    ExecutionResult(
+        data={'droid': {'name': 'R2-D2', 'primaryFunction': 'Astromech'}},
+        errors=None)
+
+The :class:`graphql.execution.ExecutionResult` has a :attr:`data` attribute
+with the actual result, and an :attr:`errors` attribute with a list of errors
+if there were any.
+
+If all your resolvers work synchronously, as in our case, you can also use
+the :func:`graphql.graphql_sync` function to query the result in ordinary
+synchronous code::
+
+    from graphql import graphql_sync
+
+    result = graphql_sync(schema, """
+      query FetchHuman($id: String!) {
+        human(id: $id) {
+          name
+          homePlanet
+        }
+      }
+      """, variable_values={'id': '1000'})
+    print(result)
+
+Here we asked for the human with the id 1000, Luke Skywalker, and his home
+planet, Tatooine. So the output of the code above is::
+
+    ExecutionResult(
+        data={'human': {'name': 'Luke Skywalker', 'homePlanet': 'Tatooine'}},
+        errors=None)
+
+Let's see what happens when we make a mistake in the query, by querying a
+non-existing ``homeTown`` field::
+
+    result = graphql_sync(schema, """
+      {
+        human(id: "1000") {
+          name
+          homeTown
+        }
+      }
+      """)
+    print(result)
+
+You will get the following result as output::
+
+    ExecutionResult(data=None, errors=[GraphQLError(
+        "Cannot query field 'homeTown' on type 'Human'."
+        " Did you mean 'homePlanet'?",
+        locations=[SourceLocation(line=5, column=7)])])
+
+This is very helpful. Not only do we get the exact location of the mistake
+in the query, but also a suggestion for correcting the bad field name.
+
+GraphQL also allows requesting the meta field ``__typename``. We can use this
+to verify that the hero of "The Empire Strikes Back" episode is Luke Skywalker
+and that he is in fact a human::
+
+    result = graphql_sync(schema, """
+      {
+        hero(episode: EMPIRE) {
+          __typename
+          name
+        }
+      }
+      """)
+    print(result)
+
+This gives the following output::
+
+    ExecutionResult(
+        data={'hero': {'__typename': 'Human', 'name': 'Luke Skywalker'}},
+        errors=None)
+
+Finally, let's see what happens when we try to access the secret backstory
+of our hero::
+
+    result = graphql_sync(schema, """
+      {
+        hero(episode: EMPIRE) {
+          name
+          secretBackstory
+        }
+      }
+      """)
+    print(result)
+
+While we get the name of the hero, the secret backstory field remains empty,
+since its resolver function raises an error.
+However, we get the error that has been raised by the resolver in the
+:attr:`errors` attribute of the result::
+
+    ExecutionResult(
+        data={'hero': {'name': 'Luke Skywalker', 'secretBackstory': None}},
+        errors=[GraphQLError('secretBackstory is secret.',
+                locations=[SourceLocation(line=5, column=9)],
+                path=['hero', 'secretBackstory'])])
diff --git a/docs/usage/resolvers.rst b/docs/usage/resolvers.rst
new file mode 100644
index 00000000..a818ed11
--- /dev/null
+++ b/docs/usage/resolvers.rst
@@ -0,0 +1,99 @@
+Implementing the Resolver Functions
+-----------------------------------
+
+Before we can execute queries against our schema, we also need to define the
+data (the humans and droids appearing in the Star Wars trilogy) and implement
+resolver functions that fetch the data (at the beginning of our schema module,
+because we are referencing them later)::
+
+    luke = dict(
+        id='1000', name='Luke Skywalker', homePlanet='Tatooine',
+        friends=['1002', '1003', '2000', '2001'], appearsIn=[4, 5, 6])
+
+    vader = dict(
+        id='1001', name='Darth Vader', homePlanet='Tatooine',
+        friends=['1004'], appearsIn=[4, 5, 6])
+
+    han = dict(
+        id='1002', name='Han Solo', homePlanet=None,
+        friends=['1000', '1003', '2001'], appearsIn=[4, 5, 6])
+
+    leia = dict(
+        id='1003', name='Leia Organa', homePlanet='Alderaan',
+        friends=['1000', '1002', '2000', '2001'], appearsIn=[4, 5, 6])
+
+    tarkin = dict(
+        id='1004', name='Wilhuff Tarkin', homePlanet=None,
+        friends=['1001'], appearsIn=[4])
+
+    human_data = {
+        '1000': luke, '1001': vader, '1002': han, '1003': leia, '1004': tarkin}
+
+    threepio = dict(
+        id='2000', name='C-3PO', primaryFunction='Protocol',
+        friends=['1000', '1002', '1003', '2001'], appearsIn=[4, 5, 6])
+
+    artoo = dict(
+        id='2001', name='R2-D2', primaryFunction='Astromech',
+        friends=['1000', '1002', '1003'], appearsIn=[4, 5, 6])
+
+    droid_data = {
+        '2000': threepio, '2001': artoo}
+
+
+    def get_character_type(character, info):
+        return 'Droid' if character['id'] in droid_data else 'Human'
+
+
+    def get_character(id):
+        """Helper function to get a character by ID."""
+        return human_data.get(id) or droid_data.get(id)
+
+
+    def get_friends(character, info):
+        """Allows us to query for a character's friends."""
+        return map(get_character, character['friends'])
+
+
+    def get_hero(root, info, episode):
+        """Allows us to fetch the undisputed hero of the trilogy, R2-D2."""
+        if episode == 5:
+            return luke  # Luke is the hero of Episode V
+        return artoo  # Artoo is the hero otherwise
+
+
+    def get_human(root, info, id):
+        """Allows us to query for the human with the given id."""
+        return human_data.get(id)
+
+
+    def get_droid(root, info, id):
+        """Allows us to query for the droid with the given id."""
+        return droid_data.get(id)
+
+
+    def get_secret_backstory(character, info):
+        """Raise an error when attempting to get the secret backstory."""
+        raise RuntimeError('secretBackstory is secret.')
+
+
+Note that the resolver functions get the current object as the first argument.
+For a field on the root Query type this is often not used, but a root object
+can also be defined when executing the query. As the second argument, they
+get an object containing execution information, as defined in the
+:class:`graphql.type.GraphQLResolveInfo` class. This object also has a
+:attr:`context` attribute that can be used to provide every resolver with
+contextual information like the currently logged in user, or a database
+session.
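+
+For illustration, a resolver that needs such context could read it from
+:attr:`info.context`. This is only a sketch: the ``current_user`` key and the
+way the context is supplied (e.g. via a ``context_value`` argument when
+executing the query) are assumptions, not part of the example schema::
+
+    def get_viewer(root, info):
+        """Hypothetical resolver returning the currently logged in user."""
+        # info.context is whatever context value the server passed in;
+        # 'current_user' is an assumed key on that context.
+        context = info.context or {}
+        return context.get('current_user')
+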
+In our simple example we don't authenticate users and use static data
+instead of a database, so we don't make use of it here.
+
+In addition to these two arguments, resolver functions optionally get the
+arguments defined for the field in the schema, passed using the same names
+(the names are not translated from GraphQL naming conventions to Python
+naming conventions).
+
+Also note that you don't need to provide resolvers for simple attribute access
+or for fetching items from Python dictionaries.
+
+Finally, note that our data uses the internal values of the ``Episode`` enum
+that we have defined above, not the descriptive enum names that are used
+externally. For example, ``NEWHOPE`` ("A New Hope") internally has the actual
+episode number 4 as its value.
diff --git a/docs/usage/schema.rst b/docs/usage/schema.rst
new file mode 100644
index 00000000..156d5e29
--- /dev/null
+++ b/docs/usage/schema.rst
@@ -0,0 +1,195 @@
+Building a Type Schema
+----------------------
+
+Using the classes in the :mod:`graphql.type` sub-package as building blocks,
+you can build a complete GraphQL type schema.
+
+Let's take the following schema as an example, which will allow us to
+query our favorite heroes from the Star Wars trilogy::
+
+    enum Episode { NEWHOPE, EMPIRE, JEDI }
+
+    interface Character {
+        id: String!
+        name: String
+        friends: [Character]
+        appearsIn: [Episode]
+    }
+
+    type Human implements Character {
+        id: String!
+        name: String
+        friends: [Character]
+        appearsIn: [Episode]
+        homePlanet: String
+    }
+
+    type Droid implements Character {
+        id: String!
+        name: String
+        friends: [Character]
+        appearsIn: [Episode]
+        primaryFunction: String
+    }
+
+    type Query {
+        hero(episode: Episode): Character
+        human(id: String!): Human
+        droid(id: String!): Droid
+    }
+
+We have been using the so-called GraphQL schema definition language (SDL) here
+to describe the schema. While it is also possible to build a schema directly
+from this notation using GraphQL-core-next, let's first create that schema
+manually by assembling the types defined here using Python classes, adding
+resolver functions written in Python for querying the data.
+
+First, we need to import all the building blocks from the :mod:`graphql.type`
+sub-package.
Note that you don't need to import from the sub-packages, since +nearly everything is also available directly in the top :mod:`graphql` package:: + + from graphql import ( + GraphQLArgument, GraphQLEnumType, GraphQLEnumValue, + GraphQLField, GraphQLInterfaceType, GraphQLList, GraphQLNonNull, + GraphQLObjectType, GraphQLSchema, GraphQLString) + +Next, we need to build the enum type ``Episode``:: + + episode_enum = GraphQLEnumType('Episode', { + 'NEWHOPE': GraphQLEnumValue(4, description='Released in 1977.'), + 'EMPIRE': GraphQLEnumValue(5, description='Released in 1980.'), + 'JEDI': GraphQLEnumValue(6, description='Released in 1983.') + }, description='One of the films in the Star Wars Trilogy') + +If you don't need the descriptions for the enum values, you can also define +the enum type like this using an underlying Python ``Enum`` type:: + + from enum import Enum + + class EpisodeEnum(Enum): + NEWHOPE = 4 + EMPIRE = 5 + JEDI = 6 + + episode_enum = GraphQLEnumType( + 'Episode', EpisodeEnum, + description='One of the films in the Star Wars Trilogy') + +You can also use a Python dictionary instead of a Python ``Enum`` type to +define the GraphQL enum type:: + + episode_enum = GraphQLEnumType( + 'Episode', {'NEWHOPE': 4, 'EMPIRE': 5, 'JEDI': 6}, + description='One of the films in the Star Wars Trilogy') + +Our schema also contains a ``Character`` interface. Here is how we build it:: + + character_interface = GraphQLInterfaceType('Character', lambda: { + 'id': GraphQLField( + GraphQLNonNull(GraphQLString), + description='The id of the character.'), + 'name': GraphQLField( + GraphQLString, + description='The name of the character.'), + 'friends': GraphQLField( + GraphQLList(character_interface), + description='The friends of the character,' + ' or an empty list if they have none.'), + 'appearsIn': GraphQLField( + GraphQLList(episode_enum), + description='Which movies they appear in.'), + 'secretBackstory': GraphQLField( + GraphQLString, + description='All secrets about their past.')}, + resolve_type=get_character_type, + description='A character in the Star Wars Trilogy') + +Note that we did not pass the dictionary of fields to the +``GraphQLInterfaceType`` directly, but using a lambda function (a +so-called "thunk"). This is necessary because the fields are referring +back to the character interface that we are just defining. Whenever you +have such recursive definitions in GraphQL-core-next, you need to use thunks. +Otherwise, you can pass everything directly. + +Characters in the Star Wars trilogy are either humans or droids. 
+So we define a ``Human`` and a ``Droid`` type, +which both implement the ``Character`` interface:: + + human_type = GraphQLObjectType('Human', lambda: { + 'id': GraphQLField( + GraphQLNonNull(GraphQLString), + description='The id of the human.'), + 'name': GraphQLField( + GraphQLString, + description='The name of the human.'), + 'friends': GraphQLField( + GraphQLList(character_interface), + description='The friends of the human,' + ' or an empty list if they have none.', + resolve=get_friends), + 'appearsIn': GraphQLField( + GraphQLList(episode_enum), + description='Which movies they appear in.'), + 'homePlanet': GraphQLField( + GraphQLString, + description='The home planet of the human, or null if unknown.'), + 'secretBackstory': GraphQLField( + GraphQLString, + resolve=get_secret_backstory, + description='Where are they from' + ' and how they came to be who they are.')}, + interfaces=[character_interface], + description='A humanoid creature in the Star Wars universe.') + + droid_type = GraphQLObjectType('Droid', lambda: { + 'id': GraphQLField( + GraphQLNonNull(GraphQLString), + description='The id of the droid.'), + 'name': GraphQLField( + GraphQLString, + description='The name of the droid.'), + 'friends': GraphQLField( + GraphQLList(character_interface), + description='The friends of the droid,' + ' or an empty list if they have none.', + resolve=get_friends, + ), + 'appearsIn': GraphQLField( + GraphQLList(episode_enum), + description='Which movies they appear in.'), + 'secretBackstory': GraphQLField( + GraphQLString, + resolve=get_secret_backstory, + description='Construction date and the name of the designer.'), + 'primaryFunction': GraphQLField( + GraphQLString, + description='The primary function of the droid.') + }, + interfaces=[character_interface], + description='A mechanical creature in the Star Wars universe.') + +Now that we have defined all used result types, we can construct the ``Query`` +type for our schema:: + + query_type = GraphQLObjectType('Query', lambda: { + 'hero': GraphQLField(character_interface, args={ + 'episode': GraphQLArgument(episode_enum, description=( + 'If omitted, returns the hero of the whole saga.' + ' If provided, returns the hero of that particular episode.'))}, + resolve=get_hero), + 'human': GraphQLField(human_type, args={ + 'id': GraphQLArgument( + GraphQLNonNull(GraphQLString), description='id of the human')}, + resolve=get_human), + 'droid': GraphQLField(droid_type, args={ + 'id': GraphQLArgument( + GraphQLNonNull(GraphQLString), description='id of the droid')}, + resolve=get_droid)}) + + +Using our query type we can define our schema:: + + schema = GraphQLSchema(query_type) + +Note that you can also pass a mutation type and a subscription type as +additional arguments to the ``GraphQLSchema``. diff --git a/docs/usage/sdl.rst b/docs/usage/sdl.rst new file mode 100644 index 00000000..519e324b --- /dev/null +++ b/docs/usage/sdl.rst @@ -0,0 +1,83 @@ +Using the Schema Definition Language +------------------------------------ + +Above we defined the GraphQL schema as Python code, using the ``GraphQLSchema`` +class and other classes representing the various GraphQL types. + +GraphQL-core-next also provides a language-agnostic way of defining a GraphQL +schema using the GraphQL schema definition language (SDL) which is also part of +the GraphQL specification. 
To do this, we simply feed the SDL as a string to
+the :func:`graphql.utilities.build_schema` function::
+
+    from graphql import build_schema
+
+    schema = build_schema("""
+
+        enum Episode { NEWHOPE, EMPIRE, JEDI }
+
+        interface Character {
+          id: String!
+          name: String
+          friends: [Character]
+          appearsIn: [Episode]
+        }
+
+        type Human implements Character {
+          id: String!
+          name: String
+          friends: [Character]
+          appearsIn: [Episode]
+          homePlanet: String
+        }
+
+        type Droid implements Character {
+          id: String!
+          name: String
+          friends: [Character]
+          appearsIn: [Episode]
+          primaryFunction: String
+        }
+
+        type Query {
+          hero(episode: Episode): Character
+          human(id: String!): Human
+          droid(id: String!): Droid
+        }
+        """)
+
+The result is a ``GraphQLSchema`` object just like the one we defined above,
+except for the resolver functions, which cannot be defined in the SDL.
+
+We would need to manually attach these functions to the schema, like so::
+
+    schema.query_type.fields['hero'].resolve = get_hero
+    schema.get_type('Character').resolve_type = get_character_type
+
+Another problem is that the SDL does not define the server-side values
+of the ``Episode`` enum type, which are returned by the resolver functions
+and which are different from the names used for the episodes.
+
+So we would also need to manually define these values, like so::
+
+    for name, value in schema.get_type('Episode').values.items():
+        value.value = EpisodeEnum[name].value
+
+This would allow us to query the schema built from SDL just like the
+manually assembled schema::
+
+    result = graphql_sync(schema, """
+        {
+          hero(episode: EMPIRE) {
+            name
+            appearsIn
+          }
+        }
+        """)
+    print(result)
+
+And we would get the expected result::
+
+    ExecutionResult(
+        data={'hero': {'name': 'Luke Skywalker',
+                       'appearsIn': ['NEWHOPE', 'EMPIRE', 'JEDI']}},
+        errors=None)
diff --git a/docs/usage/validator.rst b/docs/usage/validator.rst
new file mode 100644
index 00000000..efd7626f
--- /dev/null
+++ b/docs/usage/validator.rst
@@ -0,0 +1,41 @@
+Validating GraphQL Queries
+--------------------------
+
+When executing GraphQL queries, the second step that happens under the hood
+after parsing the source code is a validation against the given schema using
+the rules of the GraphQL specification. You can also run the validation step
+manually by calling the :func:`graphql.validation.validate` function, passing
+the schema and the AST document::
+
+    from graphql import parse, validate
+
+    errors = validate(schema, parse("""
+        {
+          human(id: NEWHOPE) {
+            name
+            homeTown
+            friends
+          }
+        }
+        """))
+
+As a result, you will get a complete list of all errors that the validator
+has found. In this case, we will get::
+
+    [GraphQLError(
+        "Expected type String!, found NEWHOPE.",
+        locations=[SourceLocation(line=3, column=17)]),
+     GraphQLError(
+        "Cannot query field 'homeTown' on type 'Human'."
+        " Did you mean 'homePlanet'?",
+        locations=[SourceLocation(line=5, column=9)]),
+     GraphQLError(
+        "Field 'friends' of type '[Character]' must have a"
+        " sub selection of subfields. Did you mean 'friends { ... }'?",
+        locations=[SourceLocation(line=6, column=9)])]
+
+These rules are implemented in the :mod:`graphql.validation.rules` module.
+Instead of the default rules, you can also use a subset or create custom
+rules. The rules are based on the :class:`graphql.validation.ValidationRule`
+class, which in turn is based on the :class:`graphql.language.Visitor` class,
+which provides a way of walking through an AST document using the visitor
+pattern.
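+
+For example, if we wanted to forbid queries for the ``secretBackstory`` field
+altogether, we could write a custom rule. The following is only a minimal
+sketch: the rule and its field check are made up for illustration, and it
+assumes that ``validate`` accepts a list of rule classes via its ``rules``
+argument, as in GraphQL.js::
+
+    from graphql import GraphQLError, parse, validate
+    from graphql.validation import ValidationRule
+
+    class NoSecretBackstoryRule(ValidationRule):
+        """Hypothetical rule that rejects queries for secret backstories."""
+
+        def enter_field(self, node, *_args):
+            # Report an error for every field named "secretBackstory".
+            if node.name.value == 'secretBackstory':
+                self.report_error(GraphQLError(
+                    'Secret backstories are never allowed.', [node]))
+
+    errors = validate(schema, parse('{ hero { secretBackstory } }'),
+                      rules=[NoSecretBackstoryRule])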
diff --git a/graphql/__init__.py b/graphql/__init__.py new file mode 100644 index 00000000..85bd25b4 --- /dev/null +++ b/graphql/__init__.py @@ -0,0 +1,440 @@ +"""GraphQL-core-next + +The primary `graphql` package includes everything you need to define a GraphQL +schema and fulfill GraphQL requests. + +GraphQL-core-next provides a reference implementation for the GraphQL +specification but is also a useful utility for operating on GraphQL files +and building sophisticated tools. + +This top-level package exports a general purpose function for fulfilling all +steps of the GraphQL specification in a single operation, but also includes +utilities for every part of the GraphQL specification: + + - Parsing the GraphQL language. + - Building a GraphQL type schema. + - Validating a GraphQL request against a type schema. + - Executing a GraphQL request against a type schema. + +This also includes utility functions for operating on GraphQL types and +GraphQL documents to facilitate building tools. + +You may also import from each sub-package directly. For example, the +following two import statements are equivalent:: + + from graphql import parse + from graphql.language import parse + +The sub-packages of GraphQL-core-next are: + + - `graphql/language`: Parse and operate on the GraphQL language. + - `graphql/type`: Define GraphQL types and schema. + - `graphql/validation`: The Validation phase of fulfilling a GraphQL result. + - `graphql/execution`: The Execution phase of fulfilling a GraphQL request. + - `graphql/error`: Creating and format GraphQL errors. + - `graphql/utilities`: + Common useful computations upon the GraphQL language and type objects. + - `graphql/subscription`: Subscribe to data updates. +""" + +__version__ = '1.0.0rc1' +__version_js__ = '14.0.0rc2' + +# The primary entry point into fulfilling a GraphQL request. + +from .graphql import graphql, graphql_sync + +# Create and operate on GraphQL type definitions and schema. +from .type import ( + GraphQLSchema, + # Definitions + GraphQLScalarType, + GraphQLObjectType, + GraphQLInterfaceType, + GraphQLUnionType, + GraphQLEnumType, + GraphQLInputObjectType, + GraphQLList, + GraphQLNonNull, + GraphQLDirective, + # "Enum" of Type Kinds + TypeKind, + # Scalars + specified_scalar_types, + GraphQLInt, + GraphQLFloat, + GraphQLString, + GraphQLBoolean, + GraphQLID, + # Built-in Directives defined by the Spec + specified_directives, + GraphQLIncludeDirective, + GraphQLSkipDirective, + GraphQLDeprecatedDirective, + # Constant Deprecation Reason + DEFAULT_DEPRECATION_REASON, + # Meta-field definitions. + SchemaMetaFieldDef, + TypeMetaFieldDef, + TypeNameMetaFieldDef, + # GraphQL Types for introspection. 
+ introspection_types, + # Predicates + is_schema, + is_directive, + is_type, + is_scalar_type, + is_object_type, + is_interface_type, + is_union_type, + is_enum_type, + is_input_object_type, + is_list_type, + is_non_null_type, + is_input_type, + is_output_type, + is_leaf_type, + is_composite_type, + is_abstract_type, + is_wrapping_type, + is_nullable_type, + is_named_type, + is_specified_scalar_type, + is_introspection_type, + is_specified_directive, + # Assertions + assert_type, + assert_scalar_type, + assert_object_type, + assert_interface_type, + assert_union_type, + assert_enum_type, + assert_input_object_type, + assert_list_type, + assert_non_null_type, + assert_input_type, + assert_output_type, + assert_leaf_type, + assert_composite_type, + assert_abstract_type, + assert_wrapping_type, + assert_nullable_type, + assert_named_type, + # Un-modifiers + get_nullable_type, + get_named_type, + # Validate GraphQL schema. + validate_schema, + assert_valid_schema, + # Types + GraphQLType, + GraphQLInputType, + GraphQLOutputType, + GraphQLLeafType, + GraphQLCompositeType, + GraphQLAbstractType, + GraphQLWrappingType, + GraphQLNullableType, + GraphQLNamedType, + Thunk, + GraphQLArgument, + GraphQLArgumentMap, + GraphQLEnumValue, + GraphQLEnumValueMap, + GraphQLField, + GraphQLFieldMap, + GraphQLFieldResolver, + GraphQLInputField, + GraphQLInputFieldMap, + GraphQLScalarSerializer, + GraphQLScalarValueParser, + GraphQLScalarLiteralParser, + GraphQLIsTypeOfFn, + GraphQLResolveInfo, + ResponsePath, + GraphQLTypeResolver) + +# Parse and operate on GraphQL language source files. +from .language import ( + Source, + get_location, + # Parse + parse, + parse_value, + parse_type, + # Print + print_ast, + # Visit + visit, + ParallelVisitor, + TypeInfoVisitor, + Visitor, + TokenKind, + DirectiveLocation, + BREAK, SKIP, REMOVE, IDLE, + # Types + Lexer, + SourceLocation, + # AST nodes + Location, + Token, + NameNode, + DocumentNode, + DefinitionNode, + ExecutableDefinitionNode, + OperationDefinitionNode, + OperationType, + VariableDefinitionNode, + VariableNode, + SelectionSetNode, + SelectionNode, + FieldNode, + ArgumentNode, + FragmentSpreadNode, + InlineFragmentNode, + FragmentDefinitionNode, + ValueNode, + IntValueNode, + FloatValueNode, + StringValueNode, + BooleanValueNode, + NullValueNode, + EnumValueNode, + ListValueNode, + ObjectValueNode, + ObjectFieldNode, + DirectiveNode, + TypeNode, + NamedTypeNode, + ListTypeNode, + NonNullTypeNode, + TypeSystemDefinitionNode, + SchemaDefinitionNode, + OperationTypeDefinitionNode, + TypeDefinitionNode, + ScalarTypeDefinitionNode, + ObjectTypeDefinitionNode, + FieldDefinitionNode, + InputValueDefinitionNode, + InterfaceTypeDefinitionNode, + UnionTypeDefinitionNode, + EnumTypeDefinitionNode, + EnumValueDefinitionNode, + InputObjectTypeDefinitionNode, + DirectiveDefinitionNode, + TypeSystemExtensionNode, + SchemaExtensionNode, + TypeExtensionNode, + ScalarTypeExtensionNode, + ObjectTypeExtensionNode, + InterfaceTypeExtensionNode, + UnionTypeExtensionNode, + EnumTypeExtensionNode, + InputObjectTypeExtensionNode) + +# Execute GraphQL queries. +from .execution import ( + execute, + default_field_resolver, + response_path_as_list, + get_directive_values, + # Types + ExecutionContext, + ExecutionResult) + +from .subscription import ( + subscribe, create_source_event_stream) + + +# Validate GraphQL queries. +from .validation import ( + validate, + ValidationContext, + # All validation rules in the GraphQL Specification. 
+ specified_rules, + # Individual validation rules. + FieldsOnCorrectTypeRule, + FragmentsOnCompositeTypesRule, + KnownArgumentNamesRule, + KnownDirectivesRule, + KnownFragmentNamesRule, + KnownTypeNamesRule, + LoneAnonymousOperationRule, + NoFragmentCyclesRule, + NoUndefinedVariablesRule, + NoUnusedFragmentsRule, + NoUnusedVariablesRule, + OverlappingFieldsCanBeMergedRule, + PossibleFragmentSpreadsRule, + ProvidedRequiredArgumentsRule, + ScalarLeafsRule, + SingleFieldSubscriptionsRule, + UniqueArgumentNamesRule, + UniqueDirectivesPerLocationRule, + UniqueFragmentNamesRule, + UniqueInputFieldNamesRule, + UniqueOperationNamesRule, + UniqueVariableNamesRule, + ValuesOfCorrectTypeRule, + VariablesAreInputTypesRule, + VariablesInAllowedPositionRule) + +# Create, format, and print GraphQL errors. +from .error import ( + GraphQLError, format_error, print_error) + +# Utilities for operating on GraphQL type schema and parsed sources. +from .utilities import ( + # Produce the GraphQL query recommended for a full schema introspection. + # Accepts optional IntrospectionOptions. + get_introspection_query, + # Gets the target Operation from a Document + get_operation_ast, + # Gets the Type for the target Operation AST. + get_operation_root_type, + # Convert a GraphQLSchema to an IntrospectionQuery + introspection_from_schema, + # Build a GraphQLSchema from an introspection result. + build_client_schema, + # Build a GraphQLSchema from a parsed GraphQL Schema language AST. + build_ast_schema, + # Build a GraphQLSchema from a GraphQL schema language document. + build_schema, + # @deprecated: Get the description from a schema AST node. + get_description, + # Extends an existing GraphQLSchema from a parsed GraphQL Schema + # language AST. + extend_schema, + # Sort a GraphQLSchema. + lexicographic_sort_schema, + # Print a GraphQLSchema to GraphQL Schema language. + print_schema, + # Prints the built-in introspection schema in the Schema Language + # format. + print_introspection_schema, + # Print a GraphQLType to GraphQL Schema language. + print_type, + # Create a GraphQLType from a GraphQL language AST. + type_from_ast, + # Create a Python value from a GraphQL language AST with a Type. + value_from_ast, + # Create a Python value from a GraphQL language AST without a Type. + value_from_ast_untyped, + # Create a GraphQL language AST from a Python value. + ast_from_value, + # A helper to use within recursive-descent visitors which need to be aware + # of the GraphQL type system. + TypeInfo, + # Coerces a Python value to a GraphQL type, or produces errors. + coerce_value, + # Concatenates multiple AST together. + concat_ast, + # Separates an AST into an AST per Operation. + separate_operations, + # Comparators for types + is_equal_type, + is_type_sub_type_of, + do_types_overlap, + # Asserts a string is a valid GraphQL name. + assert_valid_name, + # Determine if a string is a valid GraphQL name. + is_valid_name_error, + # Compares two GraphQLSchemas and detects breaking changes. 
+ find_breaking_changes, find_dangerous_changes, + BreakingChange, BreakingChangeType, + DangerousChange, DangerousChangeType) + +__all__ = [ + 'graphql', 'graphql_sync', + 'GraphQLSchema', + 'GraphQLScalarType', 'GraphQLObjectType', 'GraphQLInterfaceType', + 'GraphQLUnionType', 'GraphQLEnumType', 'GraphQLInputObjectType', + 'GraphQLList', 'GraphQLNonNull', 'GraphQLDirective', + 'TypeKind', + 'specified_scalar_types', + 'GraphQLInt', 'GraphQLFloat', 'GraphQLString', 'GraphQLBoolean', + 'GraphQLID', + 'specified_directives', + 'GraphQLIncludeDirective', 'GraphQLSkipDirective', + 'GraphQLDeprecatedDirective', + 'DEFAULT_DEPRECATION_REASON', + 'SchemaMetaFieldDef', 'TypeMetaFieldDef', 'TypeNameMetaFieldDef', + 'introspection_types', 'is_schema', 'is_directive', 'is_type', + 'is_scalar_type', 'is_object_type', 'is_interface_type', + 'is_union_type', 'is_enum_type', 'is_input_object_type', + 'is_list_type', 'is_non_null_type', 'is_input_type', 'is_output_type', + 'is_leaf_type', 'is_composite_type', 'is_abstract_type', + 'is_wrapping_type', 'is_nullable_type', 'is_named_type', + 'is_specified_scalar_type', 'is_introspection_type', + 'is_specified_directive', + 'assert_type', 'assert_scalar_type', 'assert_object_type', + 'assert_interface_type', 'assert_union_type', 'assert_enum_type', + 'assert_input_object_type', 'assert_list_type', 'assert_non_null_type', + 'assert_input_type', 'assert_output_type', 'assert_leaf_type', + 'assert_composite_type', 'assert_abstract_type', 'assert_wrapping_type', + 'assert_nullable_type', 'assert_named_type', + 'get_nullable_type', 'get_named_type', + 'validate_schema', 'assert_valid_schema', + 'GraphQLType', 'GraphQLInputType', 'GraphQLOutputType', 'GraphQLLeafType', + 'GraphQLCompositeType', 'GraphQLAbstractType', + 'GraphQLWrappingType', 'GraphQLNullableType', 'GraphQLNamedType', + 'Thunk', 'GraphQLArgument', 'GraphQLArgumentMap', + 'GraphQLEnumValue', 'GraphQLEnumValueMap', + 'GraphQLField', 'GraphQLFieldMap', 'GraphQLFieldResolver', + 'GraphQLInputField', 'GraphQLInputFieldMap', + 'GraphQLScalarSerializer', 'GraphQLScalarValueParser', + 'GraphQLScalarLiteralParser', 'GraphQLIsTypeOfFn', + 'GraphQLResolveInfo', 'ResponsePath', 'GraphQLTypeResolver', + 'Source', 'get_location', + 'parse', 'parse_value', 'parse_type', + 'print_ast', 'visit', 'ParallelVisitor', 'TypeInfoVisitor', 'Visitor', + 'TokenKind', 'DirectiveLocation', 'BREAK', 'SKIP', 'REMOVE', 'IDLE', + 'Lexer', 'SourceLocation', 'Location', 'Token', + 'NameNode', 'DocumentNode', 'DefinitionNode', 'ExecutableDefinitionNode', + 'OperationDefinitionNode', 'OperationType', 'VariableDefinitionNode', + 'VariableNode', 'SelectionSetNode', 'SelectionNode', 'FieldNode', + 'ArgumentNode', 'FragmentSpreadNode', 'InlineFragmentNode', + 'FragmentDefinitionNode', 'ValueNode', 'IntValueNode', 'FloatValueNode', + 'StringValueNode', 'BooleanValueNode', 'NullValueNode', 'EnumValueNode', + 'ListValueNode', 'ObjectValueNode', 'ObjectFieldNode', 'DirectiveNode', + 'TypeNode', 'NamedTypeNode', 'ListTypeNode', 'NonNullTypeNode', + 'TypeSystemDefinitionNode', 'SchemaDefinitionNode', + 'OperationTypeDefinitionNode', 'TypeDefinitionNode', + 'ScalarTypeDefinitionNode', 'ObjectTypeDefinitionNode', + 'FieldDefinitionNode', 'InputValueDefinitionNode', + 'InterfaceTypeDefinitionNode', 'UnionTypeDefinitionNode', + 'EnumTypeDefinitionNode', 'EnumValueDefinitionNode', + 'InputObjectTypeDefinitionNode', 'DirectiveDefinitionNode', + 'TypeSystemExtensionNode', 'SchemaExtensionNode', 'TypeExtensionNode', + 'ScalarTypeExtensionNode', 
'ObjectTypeExtensionNode', + 'InterfaceTypeExtensionNode', 'UnionTypeExtensionNode', + 'EnumTypeExtensionNode', 'InputObjectTypeExtensionNode', + 'execute', 'default_field_resolver', 'response_path_as_list', + 'get_directive_values', 'ExecutionContext', 'ExecutionResult', + 'subscribe', 'create_source_event_stream', + 'validate', 'ValidationContext', + 'specified_rules', + 'FieldsOnCorrectTypeRule', 'FragmentsOnCompositeTypesRule', + 'KnownArgumentNamesRule', 'KnownDirectivesRule', 'KnownFragmentNamesRule', + 'KnownTypeNamesRule', 'LoneAnonymousOperationRule', 'NoFragmentCyclesRule', + 'NoUndefinedVariablesRule', 'NoUnusedFragmentsRule', + 'NoUnusedVariablesRule', 'OverlappingFieldsCanBeMergedRule', + 'PossibleFragmentSpreadsRule', 'ProvidedRequiredArgumentsRule', + 'ScalarLeafsRule', 'SingleFieldSubscriptionsRule', + 'UniqueArgumentNamesRule', 'UniqueDirectivesPerLocationRule', + 'UniqueFragmentNamesRule', 'UniqueInputFieldNamesRule', + 'UniqueOperationNamesRule', 'UniqueVariableNamesRule', + 'ValuesOfCorrectTypeRule', 'VariablesAreInputTypesRule', + 'VariablesInAllowedPositionRule', + 'GraphQLError', 'format_error', 'print_error', + 'get_introspection_query', 'get_operation_ast', 'get_operation_root_type', + 'introspection_from_schema', 'build_client_schema', 'build_ast_schema', + 'build_schema', 'get_description', 'extend_schema', + 'lexicographic_sort_schema', 'print_schema', 'print_introspection_schema', + 'print_type', 'type_from_ast', 'value_from_ast', 'value_from_ast_untyped', + 'ast_from_value', 'TypeInfo', 'coerce_value', 'concat_ast', + 'separate_operations', 'is_equal_type', 'is_type_sub_type_of', + 'do_types_overlap', 'assert_valid_name', 'is_valid_name_error', + 'find_breaking_changes', 'find_dangerous_changes', + 'BreakingChange', 'BreakingChangeType', + 'DangerousChange', 'DangerousChangeType'] diff --git a/graphql/error/__init__.py b/graphql/error/__init__.py new file mode 100644 index 00000000..7b834b25 --- /dev/null +++ b/graphql/error/__init__.py @@ -0,0 +1,16 @@ +"""GraphQL Errors + +The `graphql.error` package is responsible for creating and formatting +GraphQL errors. +""" + +from .graphql_error import GraphQLError +from .syntax_error import GraphQLSyntaxError +from .located_error import located_error +from .print_error import print_error +from .format_error import format_error +from .invalid import INVALID, InvalidType + +__all__ = [ + 'INVALID', 'InvalidType', 'GraphQLError', 'GraphQLSyntaxError', + 'format_error', 'print_error', 'located_error'] diff --git a/graphql/error/format_error.py b/graphql/error/format_error.py new file mode 100644 index 00000000..20cc1cf6 --- /dev/null +++ b/graphql/error/format_error.py @@ -0,0 +1,23 @@ +from typing import Any, Dict, TYPE_CHECKING + +if TYPE_CHECKING: + from .graphql_error import GraphQLError # noqa: F401 + + +__all__ = ['format_error'] + + +def format_error(error: 'GraphQLError') -> dict: + """Format a GraphQL error + + Given a GraphQLError, format it according to the rules described by the + Response Format, Errors section of the GraphQL Specification. 
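+
+    For example, a validation error that has a location but no path would be
+    formatted roughly like this (illustrative values only; note that the
+    `locations` entry keeps the `SourceLocation` objects, and an `extensions`
+    entry is added only when present)::
+
+        {'message': "Cannot query field 'homeTown' on type 'Human'.",
+         'locations': [SourceLocation(line=5, column=9)],
+         'path': None}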
+    """
+    if not error:
+        raise ValueError('Received null or undefined error.')
+    formatted: Dict[str, Any] = dict(  # noqa: E701 (pycqa/flake8#394)
+        message=error.message or 'An unknown error occurred.',
+        locations=error.locations, path=error.path)
+    if error.extensions:
+        formatted.update(extensions=error.extensions)
+    return formatted
diff --git a/graphql/error/graphql_error.py b/graphql/error/graphql_error.py
new file mode 100644
index 00000000..dbee512b
--- /dev/null
+++ b/graphql/error/graphql_error.py
@@ -0,0 +1,142 @@
+from typing import Any, Dict, List, Optional, Sequence, Union, TYPE_CHECKING
+
+from .format_error import format_error
+from .print_error import print_error
+
+if TYPE_CHECKING:
+    from ..language.ast import Node  # noqa
+    from ..language.location import SourceLocation  # noqa
+    from ..language.source import Source  # noqa
+
+__all__ = ['GraphQLError']
+
+
+class GraphQLError(Exception):
+    """GraphQL Error
+
+    A GraphQLError describes an Error found during the parse, validate, or
+    execute phases of performing a GraphQL operation. In addition to a
+    message, it also includes information about the locations in a GraphQL
+    document and/or execution result that correspond to the Error.
+    """
+
+    message: str
+    """A message describing the Error for debugging purposes
+
+    Note: should be treated as readonly, despite invariant usage.
+    """
+
+    locations: Optional[List['SourceLocation']]
+    """Source locations
+
+    A list of (line, column) locations within the source
+    GraphQL document which correspond to this error.
+
+    Errors during validation often contain multiple locations, for example
+    to point out two things with the same name. Errors during execution
+    include a single location, the field which produced the error.
+    """
+
+    path: Optional[List[Union[str, int]]]
+    """Error path
+
+    A list of field names and indexes describing the path into the
+    execution response which corresponds to this error.
+    Only included for errors during execution.
+    """
+
+    nodes: Optional[List['Node']]
+    """A list of GraphQL AST Nodes corresponding to this error"""
+
+    source: Optional['Source']
+    """The source GraphQL document for the first location of this error
+
+    Note that if this Error represents more than one node, the source may
+    not represent nodes after the first node.
+    """
+
+    positions: Optional[Sequence[int]]
+    """Error positions
+
+    A list of character offsets within the source GraphQL document
+    which correspond to this error.
+ """ + + original_error: Optional[Exception] + """The original error thrown from a field resolver during execution""" + + extensions: Optional[Dict[str, Any]] + """Extension fields to add to the formatted error""" + + __slots__ = ('message', 'nodes', 'source', 'positions', 'locations', + 'path', 'original_error', 'extensions') + + def __init__(self, message: str, + nodes: Union[Sequence['Node'], 'Node']=None, + source: 'Source'=None, + positions: Sequence[int]=None, + path: Sequence[Union[str, int]]=None, + original_error: Exception=None, + extensions: Dict[str, Any]=None) -> None: + super(GraphQLError, self).__init__(message) + self.message = message + if nodes and not isinstance(nodes, list): + nodes = [nodes] # type: ignore + self.nodes = nodes or None # type: ignore + self.source = source + if not source and nodes: + node = nodes[0] # type: ignore + if node and node.loc and node.loc.source: + self.source = node.loc.source + if not positions and nodes: + positions = [node.loc.start + for node in nodes if node.loc] # type: ignore + self.positions = positions or None + if positions and source: + locations: Optional[List['SourceLocation']] = [ + source.get_location(pos) for pos in positions] + elif nodes: + locations = [node.loc.source.get_location(node.loc.start) + for node in nodes if node.loc] # type: ignore + else: + locations = None + self.locations = locations + if path and not isinstance(path, list): + path = list(path) + self.path = path or None # type: ignore + self.original_error = original_error + if not extensions and original_error: + try: + extensions = original_error.extensions # type: ignore + except AttributeError: + pass + self.extensions = extensions or {} + + def __str__(self): + return print_error(self) + + def __repr__(self): + args = [repr(self.message)] + if self.locations: + args.append(f'locations={self.locations!r}') + if self.path: + args.append(f'path={self.path!r}') + if self.extensions: + args.append(f'extensions={self.extensions!r}') + return f"{self.__class__.__name__}({', '.join(args)})" + + def __eq__(self, other): + return (isinstance(other, GraphQLError) and + self.__class__ == other.__class__ and + all(getattr(self, slot) == getattr(other, slot) + for slot in self.__slots__)) or ( + isinstance(other, dict) and 'message' in other and + all(slot in self.__slots__ and + getattr(self, slot) == other.get(slot) for slot in other)) + + def __ne__(self, other): + return not self.__eq__(other) + + @property + def formatted(self): + """Get error formatted according to the specification.""" + return format_error(self) diff --git a/graphql/error/invalid.py b/graphql/error/invalid.py new file mode 100644 index 00000000..a7000a13 --- /dev/null +++ b/graphql/error/invalid.py @@ -0,0 +1,24 @@ +__all__ = ['INVALID', 'InvalidType'] + + +class InvalidType(ValueError): + """Auxiliary class for creating the INVALID singleton.""" + + def __repr__(self): + return '' + + def __str__(self): + return 'INVALID' + + def __bool__(self): + return False + + def __eq__(self, other): + return other is INVALID + + def __ne__(self, other): + return not self.__eq__(other) + + +# Used to indicate invalid values (like "undefined" in GraphQL.js): +INVALID = InvalidType() diff --git a/graphql/error/located_error.py b/graphql/error/located_error.py new file mode 100644 index 00000000..5bbf23ed --- /dev/null +++ b/graphql/error/located_error.py @@ -0,0 +1,45 @@ +from typing import TYPE_CHECKING, Sequence, Union + +from .graphql_error import GraphQLError + +if TYPE_CHECKING: + from 
..language.ast import Node # noqa + +__all__ = ['located_error'] + + +def located_error(original_error: Union[Exception, GraphQLError], + nodes: Sequence['Node'], + path: Sequence[Union[str, int]]) -> GraphQLError: + """Located GraphQL Error + + Given an arbitrary Error, presumably thrown while attempting to execute a + GraphQL operation, produce a new GraphQLError aware of the location in the + document responsible for the original Error. + """ + if original_error: + # Note: this uses a brand-check to support GraphQL errors originating + # from other contexts. + try: + if isinstance(original_error.path, list): # type: ignore + return original_error # type: ignore + except AttributeError: + pass + try: + message = original_error.message # type: ignore + except AttributeError: + message = str(original_error) + try: + source = original_error.source # type: ignore + except AttributeError: + source = None + try: + positions = original_error.positions # type: ignore + except AttributeError: + positions = None + try: + nodes = original_error.nodes or nodes # type: ignore + except AttributeError: + pass + return GraphQLError( + message, nodes, source, positions, path, original_error) diff --git a/graphql/error/print_error.py b/graphql/error/print_error.py new file mode 100644 index 00000000..5379d46b --- /dev/null +++ b/graphql/error/print_error.py @@ -0,0 +1,78 @@ +import re +from functools import reduce +from typing import List, Optional, Tuple, TYPE_CHECKING + +if TYPE_CHECKING: + from .graphql_error import GraphQLError # noqa: F401 + from ..language import Source, SourceLocation # noqa: F401 + + +__all__ = ['print_error'] + + +def print_error(error: 'GraphQLError') -> str: + """Print a GraphQLError to a string. + + The printed string will contain useful location information about the + error's position in the source. + """ + printed_locations: List[str] = [] + print_location = printed_locations.append + if error.nodes: + for node in error.nodes: + if node.loc: + print_location(highlight_source_at_location( + node.loc.source, + node.loc.source.get_location(node.loc.start))) + elif error.source and error.locations: + source = error.source + for location in error.locations: + print_location(highlight_source_at_location(source, location)) + if printed_locations: + return '\n\n'.join([error.message] + printed_locations) + '\n' + return error.message + + +_re_newline = re.compile(r'\r\n|[\n\r]') + + +def highlight_source_at_location( + source: 'Source', location: 'SourceLocation') -> str: + """Highlight source at given location. + + This renders a helpful description of the location of the error in the + GraphQL Source document. 
+ """ + first_line_column_offset = source.location_offset.column - 1 + body = ' ' * first_line_column_offset + source.body + + line_index = location.line - 1 + line_offset = source.location_offset.line - 1 + line_num = location.line + line_offset + + column_offset = first_line_column_offset if location.line == 1 else 0 + column_num = location.column + column_offset + + lines = _re_newline.split(body) # works a bit different from splitlines() + len_lines = len(lines) + + def get_line(index: int) -> Optional[str]: + return lines[index] if 0 <= index < len_lines else None + + return ( + f'{source.name} ({line_num}:{column_num})\n' + + print_prefixed_lines([ + (f'{line_num - 1}: ', get_line(line_index - 1)), + (f'{line_num}: ', get_line(line_index)), + ('', ' ' * (column_num - 1) + '^'), + (f'{line_num + 1}: ', get_line(line_index + 1))])) + + +def print_prefixed_lines(lines: List[Tuple[str, Optional[str]]]) -> str: + """Print lines specified like this: ["prefix", "string"]""" + existing_lines = [line for line in lines if line[1] is not None] + pad_len = reduce( + lambda pad, line: max(pad, len(line[0])), existing_lines, 0) + return '\n'.join(map( + lambda line: line[0].rjust(pad_len) + line[1], # type:ignore + existing_lines)) diff --git a/graphql/error/syntax_error.py b/graphql/error/syntax_error.py new file mode 100644 index 00000000..acac11a4 --- /dev/null +++ b/graphql/error/syntax_error.py @@ -0,0 +1,12 @@ +from .graphql_error import GraphQLError + +__all__ = ['GraphQLSyntaxError'] + + +class GraphQLSyntaxError(GraphQLError): + """A GraphQLError representing a syntax error.""" + + def __init__(self, source, position, description): + super().__init__(f'Syntax Error: {description}', + source=source, positions=[position]) + self.description = description diff --git a/graphql/execution/__init__.py b/graphql/execution/__init__.py new file mode 100644 index 00000000..10398898 --- /dev/null +++ b/graphql/execution/__init__.py @@ -0,0 +1,15 @@ +"""GraphQL Execution + +The `graphql.execution` package is responsible for the execution phase +of fulfilling a GraphQL request. 
+""" + +from .execute import ( + execute, default_field_resolver, response_path_as_list, + ExecutionContext, ExecutionResult) +from .values import get_directive_values + +__all__ = [ + 'execute', 'default_field_resolver', 'response_path_as_list', + 'ExecutionContext', 'ExecutionResult', + 'get_directive_values'] diff --git a/graphql/execution/execute.py b/graphql/execution/execute.py new file mode 100644 index 00000000..e8808de1 --- /dev/null +++ b/graphql/execution/execute.py @@ -0,0 +1,951 @@ +from inspect import isawaitable +from typing import ( + Any, Awaitable, Dict, Iterable, List, NamedTuple, Optional, Set, Union, + Tuple, cast) + +from ..error import GraphQLError, INVALID, located_error +from ..language import ( + DocumentNode, FieldNode, FragmentDefinitionNode, + FragmentSpreadNode, InlineFragmentNode, OperationDefinitionNode, + OperationType, SelectionSetNode) +from ..pyutils import is_invalid, is_nullish, MaybeAwaitable +from ..utilities import get_operation_root_type, type_from_ast +from ..type import ( + GraphQLAbstractType, GraphQLField, GraphQLIncludeDirective, + GraphQLLeafType, GraphQLList, GraphQLNonNull, GraphQLObjectType, + GraphQLOutputType, GraphQLSchema, GraphQLSkipDirective, + GraphQLFieldResolver, GraphQLResolveInfo, ResponsePath, + SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, + assert_valid_schema, is_abstract_type, is_leaf_type, is_list_type, + is_non_null_type, is_object_type) +from .values import ( + get_argument_values, get_directive_values, get_variable_values) + +__all__ = [ + 'add_path', 'assert_valid_execution_arguments', 'default_field_resolver', + 'execute', 'get_field_def', 'response_path_as_list', + 'ExecutionResult', 'ExecutionContext'] + + +# Terminology +# +# "Definitions" are the generic name for top-level statements in the document. +# Examples of this include: +# 1) Operations (such as a query) +# 2) Fragments +# +# "Operations" are a generic name for requests in the document. +# Examples of this include: +# 1) query, +# 2) mutation +# +# "Selections" are the definitions that can appear legally and at +# single level of the query. These include: +# 1) field references e.g "a" +# 2) fragment "spreads" e.g. "...c" +# 3) inline fragment "spreads" e.g. "...on Type { a }" + + +class ExecutionResult(NamedTuple): + """The result of GraphQL execution. + + - `data` is the result of a successful execution of the query. + - `errors` is included when any errors occurred as a non-empty list. + """ + + data: Optional[Dict[str, Any]] + errors: Optional[List[GraphQLError]] + + +ExecutionResult.__new__.__defaults__ = (None, None) # type: ignore + + +def execute( + schema: GraphQLSchema, document: DocumentNode, + root_value: Any=None, context_value: Any=None, + variable_values: Dict[str, Any]=None, + operation_name: str=None, field_resolver: GraphQLFieldResolver=None + ) -> MaybeAwaitable[ExecutionResult]: + """Execute a GraphQL operation. + + Implements the "Evaluating requests" section of the GraphQL specification. + + Returns an ExecutionResult (if all encountered resolvers are synchronous), + or a coroutine object eventually yielding an ExecutionResult. + + If the arguments to this function do not result in a legal execution + context, a GraphQLError will be thrown immediately explaining the invalid + input. + """ + # If arguments are missing or incorrect, throw an error. 
+ assert_valid_execution_arguments(schema, document, variable_values) + + # If a valid execution context cannot be created due to incorrect + # arguments, a "Response" with only errors is returned. + exe_context = ExecutionContext.build( + schema, document, root_value, context_value, + variable_values, operation_name, field_resolver) + + # Return early errors if execution context failed. + if isinstance(exe_context, list): + return ExecutionResult(data=None, errors=exe_context) + + # Return a possible coroutine object that will eventually yield the data + # described by the "Response" section of the GraphQL specification. + # + # If errors are encountered while executing a GraphQL field, only that + # field and its descendants will be omitted, and sibling fields will still + # be executed. An execution which encounters errors will still result in a + # coroutine object that can be executed without errors. + + data = exe_context.execute_operation(exe_context.operation, root_value) + return exe_context.build_response(data) + + +class ExecutionContext: + """Data that must be available at all points during query execution. + + Namely, schema of the type system that is currently executing, + and the fragments defined in the query document. + """ + + schema: GraphQLSchema + fragments: Dict[str, FragmentDefinitionNode] + root_value: Any + context_value: Any + operation: OperationDefinitionNode + variable_values: Dict[str, Any] + field_resolver: GraphQLFieldResolver + errors: List[GraphQLError] + + def __init__( + self, schema: GraphQLSchema, + fragments: Dict[str, FragmentDefinitionNode], + root_value: Any, context_value: Any, + operation: OperationDefinitionNode, + variable_values: Dict[str, Any], + field_resolver: GraphQLFieldResolver, + errors: List[GraphQLError]) -> None: + self.schema = schema + self.fragments = fragments + self.root_value = root_value + self.context_value = context_value + self.operation = operation + self.variable_values = variable_values + self.field_resolver = field_resolver # type: ignore + self.errors = errors + self._subfields_cache: Dict[ + Tuple[GraphQLObjectType, Tuple[FieldNode, ...]], + Dict[str, List[FieldNode]]] = {} + + @classmethod + def build( + cls, schema: GraphQLSchema, document: DocumentNode, + root_value: Any=None, context_value: Any=None, + raw_variable_values: Dict[str, Any]=None, + operation_name: str=None, + field_resolver: GraphQLFieldResolver=None + ) -> Union[List[GraphQLError], 'ExecutionContext']: + """Build an execution context + + Constructs a ExecutionContext object from the arguments passed to + execute, which we will pass throughout the other execution methods. + + Throws a GraphQLError if a valid execution context cannot be created. 
+ """ + errors: List[GraphQLError] = [] + operation: Optional[OperationDefinitionNode] = None + has_multiple_assumed_operations = False + fragments: Dict[str, FragmentDefinitionNode] = {} + for definition in document.definitions: + if isinstance(definition, OperationDefinitionNode): + if not operation_name and operation: + has_multiple_assumed_operations = True + elif (not operation_name or ( + definition.name and + definition.name.value == operation_name)): + operation = definition + elif isinstance(definition, FragmentDefinitionNode): + fragments[definition.name.value] = definition + + if not operation: + if operation_name: + errors.append(GraphQLError( + f"Unknown operation named '{operation_name}'.")) + else: + errors.append(GraphQLError('Must provide an operation.')) + elif has_multiple_assumed_operations: + errors.append(GraphQLError( + 'Must provide operation name' + ' if query contains multiple operations.')) + + variable_values = None + if operation: + coerced_variable_values = get_variable_values( + schema, + operation.variable_definitions or [], + raw_variable_values or {}) + + if coerced_variable_values.errors: + errors.extend(coerced_variable_values.errors) + else: + variable_values = coerced_variable_values.coerced + + if errors: + return errors + + if operation is None: + raise TypeError('Has operation if no errors.') + if variable_values is None: + raise TypeError('Has variables if no errors.') + + return cls( + schema, fragments, root_value, context_value, operation, + variable_values, field_resolver or default_field_resolver, errors) + + def build_response( + self, data: MaybeAwaitable[Optional[Dict[str, Any]]] + ) -> MaybeAwaitable[ExecutionResult]: + """Build response. + + Given a completed execution context and data, build the (data, errors) + response defined by the "Response" section of the GraphQL spec. + """ + if isawaitable(data): + async def build_response_async(): + return self.build_response(await data) + return build_response_async() + data = cast(Optional[Dict[str, Any]], data) + return ExecutionResult(data=data, errors=self.errors or None) + + def execute_operation( + self, operation: OperationDefinitionNode, + root_value: Any) -> Optional[MaybeAwaitable[Any]]: + """Execute an operation. + + Implements the "Evaluating operations" section of the spec. + """ + type_ = get_operation_root_type(self.schema, operation) + fields = self.collect_fields(type_, operation.selection_set, {}, set()) + + path = None + + # Errors from sub-fields of a NonNull type may propagate to the top + # level, at which point we still log the error and null the parent + # field, which in this case is the entire response. + # + # Similar to complete_value_catching_error. 
+ try: + result = (self.execute_fields_serially + if operation.operation == OperationType.MUTATION + else self.execute_fields + )(type_, root_value, path, fields) + except GraphQLError as error: + self.errors.append(error) + return None + except Exception as error: + error = GraphQLError(str(error), original_error=error) + self.errors.append(error) + return None + else: + if isawaitable(result): + # noinspection PyShadowingNames + async def await_result(): + try: + return await result + except GraphQLError as error: + self.errors.append(error) + except Exception as error: + error = GraphQLError(str(error), original_error=error) + self.errors.append(error) + return await_result() + return result + + def execute_fields_serially( + self, parent_type: GraphQLObjectType, source_value: Any, + path: Optional[ResponsePath], fields: Dict[str, List[FieldNode]] + ) -> MaybeAwaitable[Dict[str, Any]]: + """Execute the given fields serially. + + Implements the "Evaluating selection sets" section of the spec + for "write" mode. + """ + results: Dict[str, Any] = {} + for response_name, field_nodes in fields.items(): + field_path = add_path(path, response_name) + result = self.resolve_field( + parent_type, source_value, field_nodes, field_path) + if result is INVALID: + continue + if isawaitable(results): + # noinspection PyShadowingNames + async def await_and_set_result(results, response_name, result): + awaited_results = await results + awaited_results[response_name] = ( + await result if isawaitable(result) + else result) + return awaited_results + results = await_and_set_result( + cast(Awaitable, results), response_name, result) + elif isawaitable(result): + # noinspection PyShadowingNames + async def set_result(results, response_name, result): + results[response_name] = await result + return results + results = set_result(results, response_name, result) + else: + results[response_name] = result + if isawaitable(results): + # noinspection PyShadowingNames + async def get_results(): + return await cast(Awaitable, results) + return get_results() + return results + + def execute_fields( + self, parent_type: GraphQLObjectType, + source_value: Any, path: Optional[ResponsePath], + fields: Dict[str, List[FieldNode]] + ) -> MaybeAwaitable[Dict[str, Any]]: + """Execute the given fields concurrently. + + Implements the "Evaluating selection sets" section of the spec + for "read" mode. + """ + is_async = False + + results = {} + for response_name, field_nodes in fields.items(): + field_path = add_path(path, response_name) + result = self.resolve_field( + parent_type, source_value, field_nodes, field_path) + if result is not INVALID: + results[response_name] = result + if not is_async and isawaitable(result): + is_async = True + + # If there are no coroutines, we can just return the object + if not is_async: + return results + + # Otherwise, results is a map from field name to the result of + # resolving that field, which is possibly a coroutine object. + # Return a coroutine object that will yield this same map, but with + # any coroutines awaited and replaced with the values they yielded. + async def get_results(): + return {key: await value if isawaitable(value) else value + for key, value in results.items()} + return get_results() + + def collect_fields( + self, runtime_type: GraphQLObjectType, + selection_set: SelectionSetNode, + fields: Dict[str, List[FieldNode]], + visited_fragment_names: Set[str]) -> Dict[str, List[FieldNode]]: + """Collect fields. 
+ + Given a selection_set, adds all of the fields in that selection to + the passed in map of fields, and returns it at the end. + + collect_fields requires the "runtime type" of an object. For a field + which returns an Interface or Union type, the "runtime type" will be + the actual Object type returned by that field. + """ + for selection in selection_set.selections: + if isinstance(selection, FieldNode): + if not self.should_include_node(selection): + continue + name = get_field_entry_key(selection) + fields.setdefault(name, []).append(selection) + elif isinstance(selection, InlineFragmentNode): + if (not self.should_include_node(selection) or + not self.does_fragment_condition_match( + selection, runtime_type)): + continue + self.collect_fields( + runtime_type, selection.selection_set, + fields, visited_fragment_names) + elif isinstance(selection, FragmentSpreadNode): + frag_name = selection.name.value + if (frag_name in visited_fragment_names or + not self.should_include_node(selection)): + continue + visited_fragment_names.add(frag_name) + fragment = self.fragments.get(frag_name) + if (not fragment or + not self.does_fragment_condition_match( + fragment, runtime_type)): + continue + self.collect_fields( + runtime_type, fragment.selection_set, + fields, visited_fragment_names) + return fields + + def should_include_node( + self, node: Union[ + FragmentSpreadNode, FieldNode, InlineFragmentNode]) -> bool: + """Check if node should be included + + Determines if a field should be included based on the @include and + @skip directives, where @skip has higher precedence than @include. + """ + skip = get_directive_values( + GraphQLSkipDirective, node, self.variable_values) + if skip and skip['if']: + return False + + include = get_directive_values( + GraphQLIncludeDirective, node, self.variable_values) + if include and not include['if']: + return False + + return True + + def does_fragment_condition_match( + self, fragment: Union[FragmentDefinitionNode, InlineFragmentNode], + type_: GraphQLObjectType) -> bool: + """Determine if a fragment is applicable to the given type.""" + type_condition_node = fragment.type_condition + if not type_condition_node: + return True + conditional_type = type_from_ast(self.schema, type_condition_node) + if conditional_type is type_: + return True + if is_abstract_type(conditional_type): + return self.schema.is_possible_type( + cast(GraphQLAbstractType, conditional_type), type_) + return False + + def build_resolve_info( + self, field_def: GraphQLField, field_nodes: List[FieldNode], + parent_type: GraphQLObjectType, path: ResponsePath + ) -> GraphQLResolveInfo: + # The resolve function's first argument is a collection of + # information about the current execution state. + return GraphQLResolveInfo( + field_nodes[0].name.value, field_nodes, field_def.type, + parent_type, path, self.schema, self.fragments, self.root_value, + self.operation, self.variable_values, self.context_value) + + def resolve_field( + self, parent_type: GraphQLObjectType, source: Any, + field_nodes: List[FieldNode], path: ResponsePath + ) -> MaybeAwaitable[Any]: + """Resolve the field on the given source object. + + In particular, this figures out the value that the field returns + by calling its resolve function, then calls complete_value to await + coroutine objects, serialize scalars, or execute the sub-selection-set + for objects. 
+ """ + field_node = field_nodes[0] + field_name = field_node.name.value + + field_def = get_field_def(self.schema, parent_type, field_name) + if not field_def: + return INVALID + + resolve_fn = field_def.resolve or self.field_resolver + + info = self.build_resolve_info( + field_def, field_nodes, parent_type, path) + + # Get the resolve function, regardless of if its result is normal + # or abrupt (error). + result = self.resolve_field_value_or_error( + field_def, field_nodes, resolve_fn, source, info) + + return self.complete_value_catching_error( + field_def.type, field_nodes, info, path, result) + + def resolve_field_value_or_error( + self, field_def: GraphQLField, field_nodes: List[FieldNode], + resolve_fn: GraphQLFieldResolver, source: Any, + info: GraphQLResolveInfo) -> Union[Exception, Any]: + try: + # Build a dictionary of arguments from the field.arguments AST, + # using the variables scope to fulfill any variable references. + args = get_argument_values( + field_def, field_nodes[0], self.variable_values) + + # Note that contrary to the JavaScript implementation, + # we pass the context value as part of the resolve info. + result = resolve_fn(source, info, **args) + if isawaitable(result): + # noinspection PyShadowingNames + async def await_result(): + try: + return await result + except GraphQLError as error: + return error + except Exception as error: + return GraphQLError( + str(error), original_error=error) + return await_result() + return result + except GraphQLError as error: + return error + except Exception as error: + return GraphQLError(str(error), original_error=error) + + def complete_value_catching_error( + self, return_type: GraphQLOutputType, field_nodes: List[FieldNode], + info: GraphQLResolveInfo, path: ResponsePath, result: Any + ) -> MaybeAwaitable[Any]: + """Complete a value while catching an error. + + This is a small wrapper around completeValue which detects and logs + errors in the execution context. + """ + try: + if isawaitable(result): + async def await_result(): + value = self.complete_value( + return_type, field_nodes, info, path, await result) + if isawaitable(value): + return await value + return value + completed = await_result() + else: + completed = self.complete_value( + return_type, field_nodes, info, path, result) + if isawaitable(completed): + # noinspection PyShadowingNames + async def await_completed(): + try: + return await completed + except Exception as error: + self.handle_field_error( + error, field_nodes, path, return_type) + return await_completed() + return completed + except Exception as error: + self.handle_field_error( + error, field_nodes, path, return_type) + return None + + def handle_field_error( + self, raw_error: Exception, field_nodes: List[FieldNode], + path: ResponsePath, return_type: GraphQLOutputType) -> None: + if not isinstance(raw_error, GraphQLError): + raw_error = GraphQLError(str(raw_error), original_error=raw_error) + error = located_error( + raw_error, field_nodes, response_path_as_list(path)) + + # If the field type is non-nullable, then it is resolved without any + # protection from errors, however it still properly locates the error. + if is_non_null_type(return_type): + raise error + # Otherwise, error protection is applied, logging the error and + # resolving a null value for this field if one is encountered. 
+ self.errors.append(error) + return None + + def complete_value( + self, return_type: GraphQLOutputType, field_nodes: List[FieldNode], + info: GraphQLResolveInfo, path: ResponsePath, result: Any + ) -> MaybeAwaitable[Any]: + """Complete a value. + + Implements the instructions for completeValue as defined in the + "Field entries" section of the spec. + + If the field type is Non-Null, then this recursively completes the + value for the inner type. It throws a field error if that completion + returns null, as per the "Nullability" section of the spec. + + If the field type is a List, then this recursively completes the value + for the inner type on each item in the list. + + If the field type is a Scalar or Enum, ensures the completed value is a + legal value of the type by calling the `serialize` method of GraphQL + type definition. + + If the field is an abstract type, determine the runtime type of the + value and then complete based on that type + + Otherwise, the field type expects a sub-selection set, and will + complete the value by evaluating all sub-selections. + """ + # If result is an Exception, throw a located error. + if isinstance(result, Exception): + raise result + + # If field type is NonNull, complete for inner type, and throw field + # error if result is null. + if is_non_null_type(return_type): + completed = self.complete_value( + cast(GraphQLNonNull, return_type).of_type, + field_nodes, info, path, result) + if completed is None: + raise TypeError( + 'Cannot return null for non-nullable field' + f' {info.parent_type.name}.{info.field_name}.') + return completed + + # If result value is null-ish (null, INVALID, or NaN) then return null. + if is_nullish(result): + return None + + # If field type is List, complete each item in the list with inner type + if is_list_type(return_type): + return self.complete_list_value( + cast(GraphQLList, return_type), + field_nodes, info, path, result) + + # If field type is a leaf type, Scalar or Enum, serialize to a valid + # value, returning null if serialization is not possible. + if is_leaf_type(return_type): + return self.complete_leaf_value( + cast(GraphQLLeafType, return_type), result) + + # If field type is an abstract type, Interface or Union, determine the + # runtime Object type and complete for that type. + if is_abstract_type(return_type): + return self.complete_abstract_value( + cast(GraphQLAbstractType, return_type), + field_nodes, info, path, result) + + # If field type is Object, execute and complete all sub-selections. + if is_object_type(return_type): + return self.complete_object_value( + cast(GraphQLObjectType, return_type), + field_nodes, info, path, result) + + # Not reachable. All possible output types have been considered. + raise TypeError( + f'Cannot complete value of unexpected type {return_type}.') + + def complete_list_value( + self, return_type: GraphQLList[GraphQLOutputType], + field_nodes: List[FieldNode], info: GraphQLResolveInfo, + path: ResponsePath, result: Iterable[Any] + ) -> MaybeAwaitable[Any]: + """Complete a list value. + + Complete a list value by completing each item in the list with the + inner type. + """ + if not isinstance(result, Iterable) or isinstance(result, str): + raise TypeError( + 'Expected Iterable, but did not find one for field' + f' {info.parent_type.name}.{info.field_name}.') + + # This is specified as a simple map, however we're optimizing the path + # where the list contains no coroutine objects by avoiding creating + # another coroutine object. 
+ item_type = return_type.of_type + is_async = False + completed_results: List[Any] = [] + append = completed_results.append + for index, item in enumerate(result): + # No need to modify the info object containing the path, + # since from here on it is not ever accessed by resolver functions. + field_path = add_path(path, index) + completed_item = self.complete_value_catching_error( + item_type, field_nodes, info, field_path, item) + + if not is_async and isawaitable(completed_item): + is_async = True + append(completed_item) + + if is_async: + async def get_completed_results(): + return [await value if isawaitable(value) else value + for value in completed_results] + return get_completed_results() + return completed_results + + @staticmethod + def complete_leaf_value( + return_type: GraphQLLeafType, + result: Any) -> Any: + """Complete a leaf value. + + Complete a Scalar or Enum by serializing to a valid value, returning + null if serialization is not possible. + """ + serialized_result = return_type.serialize(result) + if is_invalid(serialized_result): + raise TypeError( + f"Expected a value of type '{return_type}'" + f' but received: {result!r}') + return serialized_result + + def complete_abstract_value( + self, return_type: GraphQLAbstractType, + field_nodes: List[FieldNode], info: GraphQLResolveInfo, + path: ResponsePath, result: Any + ) -> MaybeAwaitable[Any]: + """Complete an abstract value. + + Complete a value of an abstract type by determining the runtime object + type of that value, then complete the value for that type. + """ + resolve_type = return_type.resolve_type + runtime_type = resolve_type( + result, info) if resolve_type else default_resolve_type_fn( + result, info, return_type) + + if isawaitable(runtime_type): + async def await_complete_object_value(): + value = self.complete_object_value( + self.ensure_valid_runtime_type( + await runtime_type, return_type, + field_nodes, info, result), + field_nodes, info, path, result) + if isawaitable(value): + return await value + return value + return await_complete_object_value() + runtime_type = cast( + Optional[Union[GraphQLObjectType, str]], runtime_type) + + return self.complete_object_value( + self.ensure_valid_runtime_type( + runtime_type, return_type, + field_nodes, info, result), + field_nodes, info, path, result) + + def ensure_valid_runtime_type( + self, runtime_type_or_name: Optional[ + Union[GraphQLObjectType, str]], + return_type: GraphQLAbstractType, field_nodes: List[FieldNode], + info: GraphQLResolveInfo, result: Any) -> GraphQLObjectType: + runtime_type = self.schema.get_type( + runtime_type_or_name) if isinstance( + runtime_type_or_name, str) else runtime_type_or_name + + if not is_object_type(runtime_type): + raise GraphQLError( + f'Abstract type {return_type.name} must resolve' + ' to an Object type at runtime' + f' for field {info.parent_type.name}.{info.field_name}' + f" with value {result!r}, received '{runtime_type}'." 
+ f' Either the {return_type.name} type should provide' + ' a "resolve_type" function or each possible type should' + ' provide an "is_type_of" function.', field_nodes) + runtime_type = cast(GraphQLObjectType, runtime_type) + + if not self.schema.is_possible_type(return_type, runtime_type): + raise GraphQLError( + f"Runtime Object type '{runtime_type.name}' is not a possible" + f" type for '{return_type.name}'.", field_nodes) + + return runtime_type + + def complete_object_value( + self, return_type: GraphQLObjectType, field_nodes: List[FieldNode], + info: GraphQLResolveInfo, path: ResponsePath, result: Any + ) -> MaybeAwaitable[Dict[str, Any]]: + """Complete an Object value by executing all sub-selections.""" + # If there is an is_type_of predicate function, call it with the + # current result. If is_type_of returns false, then raise an error + # rather than continuing execution. + if return_type.is_type_of: + is_type_of = return_type.is_type_of(result, info) + + if isawaitable(is_type_of): + async def collect_and_execute_subfields_async(): + if not await is_type_of: + raise invalid_return_type_error( + return_type, result, field_nodes) + return self.collect_and_execute_subfields( + return_type, field_nodes, path, result) + return collect_and_execute_subfields_async() + + if not is_type_of: + raise invalid_return_type_error( + return_type, result, field_nodes) + + return self.collect_and_execute_subfields( + return_type, field_nodes, path, result) + + def collect_and_execute_subfields( + self, return_type: GraphQLObjectType, + field_nodes: List[FieldNode], path: ResponsePath, + result: Any) -> MaybeAwaitable[Dict[str, Any]]: + """Collect sub-fields to execute to complete this value.""" + sub_field_nodes = self.collect_subfields(return_type, field_nodes) + return self.execute_fields(return_type, result, path, sub_field_nodes) + + def collect_subfields( + self, return_type: GraphQLObjectType, + field_nodes: List[FieldNode]) -> Dict[str, List[FieldNode]]: + """Collect subfields. + + # A cached collection of relevant subfields with regard to the + # return type is kept in the execution context as _subfields_cache. + # This ensures the subfields are not repeatedly calculated, + # which saves overhead when resolving lists of values. + """ + cache_key = return_type, tuple(field_nodes) + sub_field_nodes = self._subfields_cache.get(cache_key) + if sub_field_nodes is None: + sub_field_nodes = {} + visited_fragment_names: Set[str] = set() + for field_node in field_nodes: + selection_set = field_node.selection_set + if selection_set: + sub_field_nodes = self.collect_fields( + return_type, selection_set, + sub_field_nodes, visited_fragment_names) + self._subfields_cache[cache_key] = sub_field_nodes + return sub_field_nodes + + +def assert_valid_execution_arguments( + schema: GraphQLSchema, document: DocumentNode, + raw_variable_values: Dict[str, Any]=None) -> None: + """Check that the arguments are acceptable. + + Essential assertions before executing to provide developer feedback for + improper use of the GraphQL library. + """ + if not document: + raise TypeError('Must provide document') + + # If the schema used for execution is invalid, throw an error. + assert_valid_schema(schema) + + # Variables, if provided, must be a dictionary. + if not (raw_variable_values is None or + isinstance(raw_variable_values, dict)): + raise TypeError( + 'Variables must be provided as a dictionary where each property is' + ' a variable value. 
Perhaps look to see if an unparsed JSON string' + ' was provided.') + + +def response_path_as_list(path: ResponsePath) -> List[Union[str, int]]: + """Get response path as a list. + + Given a ResponsePath (found in the `path` entry in the information provided + as the last argument to a field resolver), return a list of the path keys. + """ + flattened: List[Union[str, int]] = [] + append = flattened.append + curr: Optional[ResponsePath] = path + while curr: + append(curr.key) + curr = curr.prev + return flattened[::-1] + + +def add_path( + prev: Optional[ResponsePath], key: Union[str, int]) -> ResponsePath: + """Add a key to a response path. + + Given a ResponsePath and a key, return a new ResponsePath containing the + new key. + """ + return ResponsePath(prev, key) + + +def get_field_def( + schema: GraphQLSchema, + parent_type: GraphQLObjectType, + field_name: str) -> GraphQLField: + """Get field definition. + + This method looks up the field on the given type definition. + It has special casing for the two introspection fields, __schema + and __typename. __typename is special because it can always be + queried as a field, even in situations where no other fields + are allowed, like on a Union. __schema could get automatically + added to the query type, but that would require mutating type + definitions, which would cause issues. + """ + if (field_name == '__schema' and + schema.query_type == parent_type): + return SchemaMetaFieldDef + elif (field_name == '__type' and + schema.query_type == parent_type): + return TypeMetaFieldDef + elif field_name == '__typename': + return TypeNameMetaFieldDef + return parent_type.fields.get(field_name) + + +def get_field_entry_key(node: FieldNode) -> str: + """Implements the logic to compute the key of a given field's entry""" + return node.alias.value if node.alias else node.name.value + + +def invalid_return_type_error( + return_type: GraphQLObjectType, + result: Any, + field_nodes: List[FieldNode]) -> GraphQLError: + """Create a GraphQLError for an invalid return type.""" + return GraphQLError( + f"Expected value of type '{return_type.name}'" + f' but got: {result!r}.', field_nodes) + + +def default_resolve_type_fn( + value: Any, + info: GraphQLResolveInfo, + abstract_type: GraphQLAbstractType + ) -> MaybeAwaitable[Optional[Union[GraphQLObjectType, str]]]: + """Default type resolver function. + + If a resolveType function is not given, then a default resolve behavior is + used which attempts two strategies: + + First, See if the provided value has a `__typename` field defined, if so, + use that value as name of the resolved type. + + Otherwise, test each possible type for the abstract type by calling + is_type_of for the object being coerced, returning the first type that + matches. + """ + + # First, look for `__typename`. + if isinstance(value, dict) and isinstance(value.get('__typename'), str): + return value['__typename'] + + # Otherwise, test each possible type. 
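+ # Each is_type_of call below may return either a plain bool or an
+ # awaitable bool; synchronous matches are returned immediately, while
+ # awaitable results are collected and awaited together before the
+ # first matching type is returned.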
+ possible_types = info.schema.get_possible_types(abstract_type) + is_type_of_results_async = [] + + for type_ in possible_types: + if type_.is_type_of: + is_type_of_result = type_.is_type_of(value, info) + + if isawaitable(is_type_of_result): + is_type_of_results_async.append((is_type_of_result, type_)) + elif is_type_of_result: + return type_ + + if is_type_of_results_async: + # noinspection PyShadowingNames + async def get_type(): + is_type_of_results = [ + (await is_type_of_result, type_) + for is_type_of_result, type_ in is_type_of_results_async] + for is_type_of_result, type_ in is_type_of_results: + if is_type_of_result: + return type_ + return get_type() + + return None + + +def default_field_resolver(source, info, **args): + """Default field resolver. + + If a resolve function is not given, then a default resolve behavior is used + which takes the property of the source object of the same name as the field + and returns it as the result, or if it's a function, returns the result + of calling that function while passing along args and context. + + For dictionaries, the field names are used as keys, for all other objects + they are used as attribute names. + """ + # ensure source is a value for which property access is acceptable. + field_name = info.field_name + value = source.get(field_name) if isinstance( + source, dict) else getattr(source, field_name, None) + if callable(value): + return value(info, **args) + return value diff --git a/graphql/execution/values.py b/graphql/execution/values.py new file mode 100644 index 00000000..ffcb0e85 --- /dev/null +++ b/graphql/execution/values.py @@ -0,0 +1,184 @@ +from typing import Any, Dict, List, NamedTuple, Optional, Union, cast + +from ..error import GraphQLError, INVALID +from ..language import ( + ArgumentNode, DirectiveNode, ExecutableDefinitionNode, FieldNode, + NullValueNode, SchemaDefinitionNode, SelectionNode, TypeDefinitionNode, + TypeExtensionNode, VariableDefinitionNode, VariableNode, print_ast) +from ..type import ( + GraphQLDirective, GraphQLField, GraphQLInputType, GraphQLSchema, + is_input_type, is_non_null_type) +from ..utilities import coerce_value, type_from_ast, value_from_ast + +__all__ = [ + 'get_variable_values', 'get_argument_values', 'get_directive_values'] + + +class CoercedVariableValues(NamedTuple): + errors: Optional[List[GraphQLError]] + coerced: Optional[Dict[str, Any]] + + +def get_variable_values( + schema: GraphQLSchema, var_def_nodes: List[VariableDefinitionNode], + inputs: Dict[str, Any]) -> CoercedVariableValues: + """Get coerced variable values based on provided definitions. + + Prepares a dict of variable values of the correct type based on the + provided variable definitions and arbitrary input. If the input cannot be + parsed to match the variable definitions, a GraphQLError will be thrown. + """ + errors: List[GraphQLError] = [] + coerced_values: Dict[str, Any] = {} + for var_def_node in var_def_nodes: + var_name = var_def_node.variable.name.value + var_type = type_from_ast(schema, var_def_node.type) + if not is_input_type(var_type): + # Must use input types for variables. This should be caught during + # validation, however is checked again here for safety. 
+ errors.append(GraphQLError( + f"Variable '${var_name}' expected value of type" + f" '{print_ast(var_def_node.type)}'" + ' which cannot be used as an input type.', + [var_def_node.type])) + else: + var_type = cast(GraphQLInputType, var_type) + has_value = var_name in inputs + value = inputs[var_name] if has_value else INVALID + if not has_value and var_def_node.default_value: + # If no value was provided to a variable with a default value, + # use the default value + coerced_values[var_name] = value_from_ast( + var_def_node.default_value, var_type) + elif (not has_value or value is None) and is_non_null_type( + var_type): + errors.append(GraphQLError( + f"Variable '${var_name}' of non-null type" + f" '{var_type}' must not be null." if has_value else + f"Variable '${var_name}' of required type" + f" '{var_type}' was not provided.", + [var_def_node])) + elif has_value: + if value is None: + # If the explicit value `None` was provided, an entry in + # the coerced values must exist as the value `None`. + coerced_values[var_name] = None + else: + # Otherwise, a non-null value was provided, coerce it to + # the expected type or report an error if coercion fails. + coerced = coerce_value(value, var_type, var_def_node) + coercion_errors = coerced.errors + if coercion_errors: + for error in coercion_errors: + error.message = ( + f"Variable '${var_name}' got invalid" + f" value {value!r}; {error.message}") + errors.extend(coercion_errors) + else: + coerced_values[var_name] = coerced.value + return (CoercedVariableValues(errors, None) if errors else + CoercedVariableValues(None, coerced_values)) + + +def get_argument_values( + type_def: Union[GraphQLField, GraphQLDirective], + node: Union[FieldNode, DirectiveNode], + variable_values: Dict[str, Any]=None) -> Dict[str, Any]: + """Get coerced argument values based on provided definitions and nodes. + + Prepares an dict of argument values given a list of argument definitions + and list of argument AST nodes. + """ + coerced_values: Dict[str, Any] = {} + arg_defs = type_def.args + arg_nodes = node.arguments + if not arg_defs or arg_nodes is None: + return coerced_values + arg_node_map = {arg.name.value: arg for arg in arg_nodes} + for name, arg_def in arg_defs.items(): + arg_type = arg_def.type + argument_node = cast(ArgumentNode, arg_node_map.get(name)) + variable_values = cast(Dict[str, Any], variable_values) + if argument_node and isinstance(argument_node.value, VariableNode): + variable_name = argument_node.value.name.value + has_value = variable_values and variable_name in variable_values + is_null = has_value and variable_values[variable_name] is None + else: + has_value = argument_node is not None + is_null = has_value and isinstance( + argument_node.value, NullValueNode) + if not has_value and arg_def.default_value is not INVALID: + # If no argument was provided where the definition has a default + # value, use the default value. + coerced_values[name] = arg_def.default_value + elif (not has_value or is_null) and is_non_null_type(arg_type): + # If no argument or a null value was provided to an argument with a + # non-null type (required), produce a field error. 
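+ # Three cases are handled below: a null value (given directly or via
+ # a variable), a variable that was never assigned a runtime value,
+ # and an argument that was not provided at all.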
+ if is_null: + raise GraphQLError( + f"Argument '{name}' of non-null type" + f" '{arg_type}' must not be null.", [argument_node.value]) + elif argument_node and isinstance( + argument_node.value, VariableNode): + raise GraphQLError( + f"Argument '{name}' of required type" + f" '{arg_type}' was provided the variable" + f" '${variable_name}'" + ' which was not provided a runtime value.', + [argument_node.value]) + else: + raise GraphQLError( + f"Argument '{name}' of required type '{arg_type}'" + ' was not provided.', [node]) + elif has_value: + if isinstance(argument_node.value, NullValueNode): + # If the explicit value `None` was provided, an entry in the + # coerced values must exist as the value `None`. + coerced_values[name] = None + elif isinstance(argument_node.value, VariableNode): + variable_name = argument_node.value.name.value + # Note: This Does no further checking that this variable is + # correct. This assumes that this query has been validated and + # the variable usage here is of the correct type. + coerced_values[name] = variable_values[variable_name] + else: + value_node = argument_node.value + coerced_value = value_from_ast( + value_node, arg_type, variable_values) + if coerced_value is INVALID: + # Note: values_of_correct_type validation should catch + # this before execution. This is a runtime check to + # ensure execution does not continue with an invalid + # argument value. + raise GraphQLError( + f"Argument '{name}'" + f" has invalid value {print_ast(value_node)}.", + [argument_node.value]) + coerced_values[name] = coerced_value + return coerced_values + + +NodeWithDirective = Union[ + ExecutableDefinitionNode, SelectionNode, + SchemaDefinitionNode, TypeDefinitionNode, TypeExtensionNode] + + +def get_directive_values( + directive_def: GraphQLDirective, node: NodeWithDirective, + variable_values: Dict[str, Any] = None) -> Optional[Dict[str, Any]]: + """Get coerced argument values based on provided nodes. + + Prepares a dict of argument values given a directive definition and + an AST node which may contain directives. Optionally also accepts a + dict of variable values. + + If the directive does not exist on the node, returns None. + """ + directives = node.directives + if directives: + directive_name = directive_def.name + for directive in directives: + if directive.name.value == directive_name: + return get_argument_values( + directive_def, directive, variable_values) + return None diff --git a/graphql/graphql.py b/graphql/graphql.py new file mode 100644 index 00000000..a5de20f6 --- /dev/null +++ b/graphql/graphql.py @@ -0,0 +1,147 @@ +from asyncio import ensure_future +from inspect import isawaitable +from typing import Any, Awaitable, Callable, Dict, Union, cast + +from .error import GraphQLError +from .execution import execute +from .language import parse, Source +from .pyutils import MaybeAwaitable +from .type import GraphQLSchema, validate_schema +from .execution.execute import ExecutionResult + +__all__ = ['graphql', 'graphql_sync'] + + +async def graphql( + schema: GraphQLSchema, + source: Union[str, Source], + root_value: Any=None, + context_value: Any=None, + variable_values: Dict[str, Any]=None, + operation_name: str=None, + field_resolver: Callable=None) -> ExecutionResult: + """Execute a GraphQL operation asynchronously. + + This is the primary entry point function for fulfilling GraphQL operations + by parsing, validating, and executing a GraphQL document along side a + GraphQL schema. 
+ + More sophisticated GraphQL servers, such as those which persist queries, + may wish to separate the validation and execution phases to a static time + tooling step, and a server runtime step. + + Accepts the following arguments: + + :arg schema: + The GraphQL type system to use when validating and executing a query. + :arg source: + A GraphQL language formatted string representing the requested + operation. + :arg root_value: + The value provided as the first argument to resolver functions on the + top level type (e.g. the query object type). + :arg context_value: + The context value is provided as an attribute of the second argument + (the resolve info) to resolver functions. It is used to pass shared + information useful at any point during query execution, for example the + currently logged in user and connections to databases or other services. + :arg variable_values: + A mapping of variable name to runtime value to use for all variables + defined in the request string. + :arg operation_name: + The name of the operation to use if request string contains multiple + possible operations. Can be omitted if request string contains only + one operation. + :arg field_resolver: + A resolver function to use when one is not provided by the schema. + If not provided, the default field resolver is used (which looks for + a value or method on the source value with the field's name). + """ + # Always return asynchronously for a consistent API. + result = graphql_impl( + schema, + source, + root_value, + context_value, + variable_values, + operation_name, + field_resolver) + + if isawaitable(result): + return await cast(Awaitable[ExecutionResult], result) + + return cast(ExecutionResult, result) + + +def graphql_sync( + schema: GraphQLSchema, + source: Union[str, Source], + root_value: Any = None, + context_value: Any = None, + variable_values: Dict[str, Any] = None, + operation_name: str = None, + field_resolver: Callable = None) -> ExecutionResult: + """Execute a GraphQL operation synchronously. + + The graphql_sync function also fulfills GraphQL operations by parsing, + validating, and executing a GraphQL document along side a GraphQL schema. + However, it guarantees to complete synchronously (or throw an error) + assuming that all field resolvers are also synchronous. + """ + result = graphql_impl( + schema, + source, + root_value, + context_value, + variable_values, + operation_name, + field_resolver) + + # Assert that the execution was synchronous. 
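+ # If the result is awaitable, it is wrapped in a future and cancelled
+ # so the pending coroutine is not left dangling before the error
+ # is raised.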
+ if isawaitable(result): + ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() + raise RuntimeError( + 'GraphQL execution failed to complete synchronously.') + + return cast(ExecutionResult, result) + + +def graphql_impl( + schema, + source, + root_value, + context_value, + variable_values, + operation_name, + field_resolver + ) -> MaybeAwaitable[ExecutionResult]: + """Execute a query, return asynchronously only if necessary.""" + # Validate Schema + schema_validation_errors = validate_schema(schema) + if schema_validation_errors: + return ExecutionResult(data=None, errors=schema_validation_errors) + + # Parse + try: + document = parse(source) + except GraphQLError as error: + return ExecutionResult(data=None, errors=[error]) + except Exception as error: + error = GraphQLError(str(error), original_error=error) + return ExecutionResult(data=None, errors=[error]) + + # Validate + from .validation import validate + validation_errors = validate(schema, document) + if validation_errors: + return ExecutionResult(data=None, errors=validation_errors) + + # Execute + return execute( + schema, + document, + root_value, + context_value, + variable_values, + operation_name, + field_resolver) diff --git a/graphql/language/__init__.py b/graphql/language/__init__.py new file mode 100644 index 00000000..6014ca42 --- /dev/null +++ b/graphql/language/__init__.py @@ -0,0 +1,73 @@ +"""GraphQL Language + +The `graphql.language` package is responsible for parsing and operating +on the GraphQL language. +""" + +from .location import get_location, SourceLocation +from .lexer import Lexer, TokenKind, Token +from .parser import parse, parse_type, parse_value +from .printer import print_ast +from .source import Source +from .visitor import ( + visit, Visitor, ParallelVisitor, TypeInfoVisitor, + BREAK, SKIP, REMOVE, IDLE) +from .ast import ( + Location, Node, + # Each kind of AST node + NameNode, DocumentNode, DefinitionNode, + ExecutableDefinitionNode, + OperationDefinitionNode, OperationType, + VariableDefinitionNode, VariableNode, + SelectionSetNode, SelectionNode, + FieldNode, ArgumentNode, + FragmentSpreadNode, InlineFragmentNode, FragmentDefinitionNode, + ValueNode, IntValueNode, FloatValueNode, StringValueNode, + BooleanValueNode, NullValueNode, EnumValueNode, ListValueNode, + ObjectValueNode, ObjectFieldNode, DirectiveNode, + TypeNode, NamedTypeNode, ListTypeNode, NonNullTypeNode, + TypeSystemDefinitionNode, SchemaDefinitionNode, + OperationTypeDefinitionNode, TypeDefinitionNode, + ScalarTypeDefinitionNode, ObjectTypeDefinitionNode, + FieldDefinitionNode, InputValueDefinitionNode, + InterfaceTypeDefinitionNode, UnionTypeDefinitionNode, + EnumTypeDefinitionNode, EnumValueDefinitionNode, + InputObjectTypeDefinitionNode, + DirectiveDefinitionNode, TypeSystemExtensionNode, + SchemaExtensionNode, TypeExtensionNode, ScalarTypeExtensionNode, + ObjectTypeExtensionNode, InterfaceTypeExtensionNode, + UnionTypeExtensionNode, EnumTypeExtensionNode, + InputObjectTypeExtensionNode) +from .directive_locations import DirectiveLocation + +__all__ = [ + 'get_location', 'SourceLocation', + 'Lexer', 'TokenKind', 'Token', + 'parse', 'parse_value', 'parse_type', + 'print_ast', 'Source', + 'visit', 'Visitor', 'ParallelVisitor', 'TypeInfoVisitor', + 'BREAK', 'SKIP', 'REMOVE', 'IDLE', + 'Location', 'DirectiveLocation', 'Node', + 'NameNode', 'DocumentNode', 'DefinitionNode', + 'ExecutableDefinitionNode', + 'OperationDefinitionNode', 'OperationType', + 'VariableDefinitionNode', 'VariableNode', + 'SelectionSetNode', 
'SelectionNode', + 'FieldNode', 'ArgumentNode', + 'FragmentSpreadNode', 'InlineFragmentNode', 'FragmentDefinitionNode', + 'ValueNode', 'IntValueNode', 'FloatValueNode', 'StringValueNode', + 'BooleanValueNode', 'NullValueNode', 'EnumValueNode', 'ListValueNode', + 'ObjectValueNode', 'ObjectFieldNode', 'DirectiveNode', + 'TypeNode', 'NamedTypeNode', 'ListTypeNode', 'NonNullTypeNode', + 'TypeSystemDefinitionNode', 'SchemaDefinitionNode', + 'OperationTypeDefinitionNode', 'TypeDefinitionNode', + 'ScalarTypeDefinitionNode', 'ObjectTypeDefinitionNode', + 'FieldDefinitionNode', 'InputValueDefinitionNode', + 'InterfaceTypeDefinitionNode', 'UnionTypeDefinitionNode', + 'EnumTypeDefinitionNode', 'EnumValueDefinitionNode', + 'InputObjectTypeDefinitionNode', + 'DirectiveDefinitionNode', 'TypeSystemExtensionNode', + 'SchemaExtensionNode', 'TypeExtensionNode', 'ScalarTypeExtensionNode', + 'ObjectTypeExtensionNode', 'InterfaceTypeExtensionNode', + 'UnionTypeExtensionNode', 'EnumTypeExtensionNode', + 'InputObjectTypeExtensionNode'] diff --git a/graphql/language/ast.py b/graphql/language/ast.py new file mode 100644 index 00000000..73e6d09b --- /dev/null +++ b/graphql/language/ast.py @@ -0,0 +1,465 @@ +from copy import deepcopy +from enum import Enum +from typing import List, NamedTuple, Optional, Union + +from .lexer import Token +from .source import Source +from ..pyutils import camel_to_snake + +__all__ = [ + 'Location', 'Node', + 'NameNode', 'DocumentNode', 'DefinitionNode', + 'ExecutableDefinitionNode', 'OperationDefinitionNode', + 'VariableDefinitionNode', + 'SelectionSetNode', 'SelectionNode', + 'FieldNode', 'ArgumentNode', + 'FragmentSpreadNode', 'InlineFragmentNode', 'FragmentDefinitionNode', + 'ValueNode', 'VariableNode', + 'IntValueNode', 'FloatValueNode', 'StringValueNode', + 'BooleanValueNode', 'NullValueNode', + 'EnumValueNode', 'ListValueNode', 'ObjectValueNode', 'ObjectFieldNode', + 'DirectiveNode', 'TypeNode', 'NamedTypeNode', + 'ListTypeNode', 'NonNullTypeNode', + 'TypeSystemDefinitionNode', 'SchemaDefinitionNode', + 'OperationType', 'OperationTypeDefinitionNode', + 'TypeDefinitionNode', + 'ScalarTypeDefinitionNode', 'ObjectTypeDefinitionNode', + 'FieldDefinitionNode', 'InputValueDefinitionNode', + 'InterfaceTypeDefinitionNode', 'UnionTypeDefinitionNode', + 'EnumTypeDefinitionNode', 'EnumValueDefinitionNode', + 'InputObjectTypeDefinitionNode', + 'DirectiveDefinitionNode', 'SchemaExtensionNode', + 'TypeExtensionNode', 'TypeSystemExtensionNode', 'ScalarTypeExtensionNode', + 'ObjectTypeExtensionNode', 'InterfaceTypeExtensionNode', + 'UnionTypeExtensionNode', 'EnumTypeExtensionNode', + 'InputObjectTypeExtensionNode'] + + +class Location(NamedTuple): + """AST Location + Contains a range of UTF-8 character offsets and token references that + identify the region of the source from which the AST derived. + """ + + start: int # character offset at which this Node begins + end: int # character offset at which this Node ends + start_token: Token # Token at which this Node begins + end_token: Token # Token at which this Node ends. 
+ source: Source # Source document the AST represents + + def __str__(self): + return f'{self.start}:{self.end}' + + def __eq__(self, other): + if isinstance(other, Location): + return self.start == other.start and self.end == other.end + elif isinstance(other, (list, tuple)) and len(other) == 2: + return self.start == other[0] and self.end == other[1] + return False + + def __ne__(self, other): + return not self.__eq__(other) + + +class OperationType(Enum): + + QUERY = 'query' + MUTATION = 'mutation' + SUBSCRIPTION = 'subscription' + + +# Base AST Node + +class Node: + """AST nodes""" + __slots__ = 'loc', + + loc: Optional[Location] + + kind: str = 'ast' # the kind of the node as a snake_case string + keys = ['loc'] # the names of the attributes of this node + + def __init__(self, **kwargs): + """Initialize the node with the given keyword arguments.""" + for key in self.keys: + setattr(self, key, kwargs.get(key)) + + def __repr__(self): + """Get a simple representation of the node.""" + name, loc = self.__class__.__name__, getattr(self, 'loc', None) + return f'{name} at {loc}' if loc else name + + def __eq__(self, other): + """Test whether two nodes are equal (recursively).""" + return (isinstance(other, Node) and + self.__class__ == other.__class__ and + all(getattr(self, key) == getattr(other, key) + for key in self.keys)) + + def __hash__(self): + return id(self) + + def __copy__(self): + """Create a shallow copy of the node.""" + return self.__class__(**{key: getattr(self, key) for key in self.keys}) + + def __deepcopy__(self, memo): + """Create a deep copy of the node""" + # noinspection PyArgumentList + return self.__class__( + **{key: deepcopy(getattr(self, key), memo) for key in self.keys}) + + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + name = cls.__name__ + if name.endswith('Node'): + name = name[:-4] + cls.kind = camel_to_snake(name) + keys = [] + for base in cls.__bases__: + # noinspection PyUnresolvedReferences + keys.extend(base.keys) + keys.extend(cls.__slots__) + cls.keys = keys + + +# Name + +class NameNode(Node): + __slots__ = 'value', + + value: str + + +# Document + +class DocumentNode(Node): + __slots__ = 'definitions', + + definitions: List['DefinitionNode'] + + +class DefinitionNode(Node): + __slots__ = () + + +class ExecutableDefinitionNode(DefinitionNode): + __slots__ = 'name', 'directives', 'variable_definitions', 'selection_set' + + directives: Optional[List['DirectiveNode']] + variable_definitions: List['VariableDefinitionNode'] + selection_set: 'SelectionSetNode' + + +class OperationDefinitionNode(ExecutableDefinitionNode): + __slots__ = 'operation', + + operation: OperationType + name: Optional[NameNode] + + +class VariableDefinitionNode(Node): + __slots__ = 'variable', 'type', 'default_value' + + variable: 'VariableNode' + type: 'TypeNode' + default_value: Optional['ValueNode'] + + +class SelectionSetNode(Node): + __slots__ = 'selections', + + selections: List['SelectionNode'] + + +class SelectionNode(Node): + __slots__ = 'directives', + + directives: Optional[List['DirectiveNode']] + + +class FieldNode(SelectionNode): + __slots__ = 'alias', 'name', 'arguments', 'selection_set' + + alias: Optional[NameNode] + name: NameNode + arguments: Optional[List['ArgumentNode']] + selection_set: Optional[SelectionSetNode] + + +class ArgumentNode(Node): + __slots__ = 'name', 'value' + + name: NameNode + value: 'ValueNode' + + +# Fragments + +class FragmentSpreadNode(SelectionNode): + __slots__ = 'name', + + name: NameNode + + +class 
InlineFragmentNode(SelectionNode): + __slots__ = 'type_condition', 'selection_set' + + type_condition: 'NamedTypeNode' + selection_set: SelectionSetNode + + +class FragmentDefinitionNode(ExecutableDefinitionNode): + __slots__ = 'type_condition', + + name: NameNode + type_condition: 'NamedTypeNode' + + +# Values + +class ValueNode(Node): + __slots__ = () + + +class VariableNode(ValueNode): + __slots__ = 'name', + + name: NameNode + + +class IntValueNode(ValueNode): + __slots__ = 'value', + + value: str + + +class FloatValueNode(ValueNode): + __slots__ = 'value', + + value: str + + +class StringValueNode(ValueNode): + __slots__ = 'value', 'block' + + value: str + block: Optional[bool] + + +class BooleanValueNode(ValueNode): + __slots__ = 'value', + + value: bool + + +class NullValueNode(ValueNode): + __slots__ = () + + +class EnumValueNode(ValueNode): + __slots__ = 'value', + + value: str + + +class ListValueNode(ValueNode): + __slots__ = 'values', + + values: List[ValueNode] + + +class ObjectValueNode(ValueNode): + __slots__ = 'fields', + + fields: List['ObjectFieldNode'] + + +class ObjectFieldNode(Node): + __slots__ = 'name', 'value' + + name: NameNode + value: ValueNode + + +# Directives + +class DirectiveNode(Node): + __slots__ = 'name', 'arguments' + + name: NameNode + arguments: List[ArgumentNode] + + +# Type Reference + +class TypeNode(Node): + __slots__ = () + + +class NamedTypeNode(TypeNode): + __slots__ = 'name', + + name: NameNode + + +class ListTypeNode(TypeNode): + __slots__ = 'type', + + type: TypeNode + + +class NonNullTypeNode(TypeNode): + __slots__ = 'type', + + type: Union[NamedTypeNode, ListTypeNode] + + +# Type System Definition + +class TypeSystemDefinitionNode(DefinitionNode): + __slots__ = () + + +class SchemaDefinitionNode(TypeSystemDefinitionNode): + __slots__ = 'directives', 'operation_types' + + directives: Optional[List[DirectiveNode]] + operation_types: List['OperationTypeDefinitionNode'] + + +class OperationTypeDefinitionNode(TypeSystemDefinitionNode): + __slots__ = 'operation', 'type' + + operation: OperationType + type: NamedTypeNode + + +# Type Definition + +class TypeDefinitionNode(TypeSystemDefinitionNode): + __slots__ = 'description', 'name', 'directives' + + description: Optional[StringValueNode] + name: NameNode + directives: Optional[List[DirectiveNode]] + + +class ScalarTypeDefinitionNode(TypeDefinitionNode): + __slots__ = () + + +class ObjectTypeDefinitionNode(TypeDefinitionNode): + __slots__ = 'interfaces', 'fields' + + interfaces: Optional[List[NamedTypeNode]] + fields: Optional[List['FieldDefinitionNode']] + + +class FieldDefinitionNode(TypeDefinitionNode): + __slots__ = 'arguments', 'type' + + arguments: Optional[List['InputValueDefinitionNode']] + type: TypeNode + + +class InputValueDefinitionNode(TypeDefinitionNode): + __slots__ = 'type', 'default_value' + + type: TypeNode + default_value: Optional[ValueNode] + + +class InterfaceTypeDefinitionNode(TypeDefinitionNode): + __slots__ = 'fields', + + fields: Optional[List['FieldDefinitionNode']] + + +class UnionTypeDefinitionNode(TypeDefinitionNode): + __slots__ = 'types', + + types: Optional[List[NamedTypeNode]] + + +class EnumTypeDefinitionNode(TypeDefinitionNode): + __slots__ = 'values', + + values: Optional[List['EnumValueDefinitionNode']] + + +class EnumValueDefinitionNode(TypeDefinitionNode): + __slots__ = () + + +class InputObjectTypeDefinitionNode(TypeDefinitionNode): + __slots__ = 'fields', + + fields: Optional[List[InputValueDefinitionNode]] + + +# Directive Definitions + +class 
DirectiveDefinitionNode(TypeSystemDefinitionNode): + __slots__ = 'description', 'name', 'arguments', 'locations' + + description: Optional[StringValueNode] + name: NameNode + arguments: Optional[List[InputValueDefinitionNode]] + locations: List[NameNode] + + +# Type System Extensions + +class SchemaExtensionNode(Node): + __slots__ = 'directives', 'operation_types' + + directives: Optional[List[DirectiveNode]] + operation_types: Optional[List[OperationTypeDefinitionNode]] + + +# Type Extensions + +class TypeExtensionNode(TypeSystemDefinitionNode): + __slots__ = 'name', 'directives' + + name: NameNode + directives: Optional[List[DirectiveNode]] + + +TypeSystemExtensionNode = Union[SchemaExtensionNode, TypeExtensionNode] + + +class ScalarTypeExtensionNode(TypeExtensionNode): + __slots__ = () + + +class ObjectTypeExtensionNode(TypeExtensionNode): + __slots__ = 'interfaces', 'fields' + + interfaces: Optional[List[NamedTypeNode]] + fields: Optional[List[FieldDefinitionNode]] + + +class InterfaceTypeExtensionNode(TypeExtensionNode): + __slots__ = 'fields', + + fields: Optional[List[FieldDefinitionNode]] + + +class UnionTypeExtensionNode(TypeExtensionNode): + __slots__ = 'types', + + types: Optional[List[NamedTypeNode]] + + +class EnumTypeExtensionNode(TypeExtensionNode): + __slots__ = 'values', + + values: Optional[List[EnumValueDefinitionNode]] + + +class InputObjectTypeExtensionNode(TypeExtensionNode): + __slots__ = 'fields', + + fields: Optional[List[InputValueDefinitionNode]] diff --git a/graphql/language/block_string_value.py b/graphql/language/block_string_value.py new file mode 100644 index 00000000..f0e5e2a2 --- /dev/null +++ b/graphql/language/block_string_value.py @@ -0,0 +1,41 @@ +__all__ = ['block_string_value'] + + +def block_string_value(raw_string: str) -> str: + """Produce the value of a block string from its parsed raw value. + + Similar to Coffeescript's block string, Python's docstring trim or + Ruby's strip_heredoc. + + This implements the GraphQL spec's BlockStringValue() static algorithm. 
+ + """ + lines = raw_string.splitlines() + + common_indent = None + for line in lines[1:]: + indent = leading_whitespace(line) + if indent < len(line) and ( + common_indent is None or indent < common_indent): + common_indent = indent + if common_indent == 0: + break + + if common_indent: + lines[1:] = [line[common_indent:] for line in lines[1:]] + + while lines and not lines[0].strip(): + lines = lines[1:] + + while lines and not lines[-1].strip(): + lines = lines[:-1] + + return '\n'.join(lines) + + +def leading_whitespace(s): + i = 0 + n = len(s) + while i < n and s[i] in ' \t': + i += 1 + return i diff --git a/graphql/language/directive_locations.py b/graphql/language/directive_locations.py new file mode 100644 index 00000000..3fe96187 --- /dev/null +++ b/graphql/language/directive_locations.py @@ -0,0 +1,29 @@ +from enum import Enum + +__all__ = ['DirectiveLocation'] + + +class DirectiveLocation(Enum): + """The enum type representing the directive location values.""" + + # Request Definitions + QUERY = 'query' + MUTATION = 'mutation' + SUBSCRIPTION = 'subscription' + FIELD = 'field' + FRAGMENT_DEFINITION = 'fragment definition' + FRAGMENT_SPREAD = 'fragment spread' + INLINE_FRAGMENT = 'inline fragment' + + # Type System Definitions + SCHEMA = 'schema' + SCALAR = 'scalar' + OBJECT = 'object' + FIELD_DEFINITION = 'field definition' + ARGUMENT_DEFINITION = 'argument definition' + INTERFACE = 'interface' + UNION = 'union' + ENUM = 'enum' + ENUM_VALUE = 'enum value' + INPUT_OBJECT = 'input object' + INPUT_FIELD_DEFINITION = 'input field definition' diff --git a/graphql/language/lexer.py b/graphql/language/lexer.py new file mode 100644 index 00000000..253d61b9 --- /dev/null +++ b/graphql/language/lexer.py @@ -0,0 +1,446 @@ +from copy import copy +from enum import Enum +from typing import List, Optional + +from ..error import GraphQLSyntaxError +from .source import Source +from .block_string_value import block_string_value + +__all__ = ['Lexer', 'TokenKind', 'Token'] + + +class TokenKind(Enum): + """Each kind of token""" + SOF = '' + EOF = '' + BANG = '!' + DOLLAR = '$' + AMP = '&' + PAREN_L = '(' + PAREN_R = ')' + SPREAD = '...' 
+ COLON = ':' + EQUALS = '=' + AT = '@' + BRACKET_L = '[' + BRACKET_R = ']' + BRACE_L = '{' + PIPE = '|' + BRACE_R = '}' + NAME = 'Name' + INT = 'Int' + FLOAT = 'Float' + STRING = 'String' + BLOCK_STRING = 'BlockString' + COMMENT = 'Comment' + + +class Token: + __slots__ = ('kind', 'start', 'end', 'line', 'column', + 'prev', 'next', 'value') + + def __init__(self, kind: TokenKind, start: int, end: int, + line: int, column: int, + prev: 'Token'=None, value: str=None) -> None: + self.kind = kind + self.start, self.end = start, end + self.line, self.column = line, column + self.prev: Optional[Token] = prev or None + self.next: Optional[Token] = None + self.value: Optional[str] = value or None + + def __repr__(self): + return ''.format( + self.desc, self.start, self.end, self.line, self.column) + + def __eq__(self, other): + if isinstance(other, Token): + return (self.kind == other.kind and + self.start == other.start and + self.end == other.end and + self.line == other.line and + self.column == other.column and + self.value == other.value) + elif isinstance(other, str): + return other == self.desc + return False + + def __copy__(self): + """Create a shallow copy of the token""" + return self.__class__( + self.kind, self.start, self.end, self.line, self.column, + self.prev, self.value) + + def __deepcopy__(self, memo): + """Allow only shallow copies to avoid recursion.""" + return copy(self) + + @property + def desc(self) -> str: + """A helper property to describe a token as a string for debugging""" + kind, value = self.kind.value, self.value + return f'{kind} {value!r}' if value else kind + + +def char_at(s, pos): + try: + return s[pos] + except IndexError: + return None + + +def print_char(char): + return TokenKind.EOF.value if char is None else repr(char) + + +_KIND_FOR_PUNCT = { + '!': TokenKind.BANG, + '$': TokenKind.DOLLAR, + '&': TokenKind.AMP, + '(': TokenKind.PAREN_L, + ')': TokenKind.PAREN_R, + ':': TokenKind.COLON, + '=': TokenKind.EQUALS, + '@': TokenKind.AT, + '[': TokenKind.BRACKET_L, + ']': TokenKind.BRACKET_R, + '{': TokenKind.BRACE_L, + '}': TokenKind.BRACE_R, + '|': TokenKind.PIPE +} + + +class Lexer: + """GraphQL Lexer + + A Lexer is a stateful stream generator in that every time + it is advanced, it returns the next token in the Source. Assuming the + source lexes, the final Token emitted by the lexer will be of kind + EOF, after which the lexer will repeatedly return the same EOF token + whenever called. + + """ + + def __init__(self, source: Source, + no_location=False, + experimental_fragment_variables=False) -> None: + """Given a Source object, this returns a Lexer for that source.""" + self.source = source + self.token = self.last_token = Token(TokenKind.SOF, 0, 0, 0, 0) + self.line, self.line_start = 1, 0 + self.no_location = no_location + self.experimental_fragment_variables = experimental_fragment_variables + + def advance(self): + self.last_token = self.token + token = self.token = self.lookahead() + return token + + def lookahead(self): + token = self.token + if token.kind != TokenKind.EOF: + while True: + if not token.next: + token.next = self.read_token(token) + token = token.next + if token.kind != TokenKind.COMMENT: + break + return token + + def read_token(self, prev: Token) -> Token: + """Get the next token from the source starting at the given position. + + This skips over whitespace and comments until it finds the next + lexable token, then lexes punctuators immediately or calls the + appropriate helper function for more complicated tokens. 
+ """
+ + """ + source = self.source + body = source.body + body_length = len(body) + + pos = self.position_after_whitespace(body, prev.end) + line = self.line + col = 1 + pos - self.line_start + + if pos >= body_length: + return Token( + TokenKind.EOF, body_length, body_length, line, col, prev) + + char = char_at(body, pos) + if char is not None: + kind = _KIND_FOR_PUNCT.get(char) + if kind: + return Token(kind, pos, pos + 1, line, col, prev) + if char == '#': + return read_comment(source, pos, line, col, prev) + elif char == '.': + if (char == char_at(body, pos + 1) == + char_at(body, pos + 2)): + return Token(TokenKind.SPREAD, pos, pos + 3, + line, col, prev) + elif 'A' <= char <= 'Z' or 'a' <= char <= 'z' or char == '_': + return read_name(source, pos, line, col, prev) + elif '0' <= char <= '9' or char == '-': + return read_number(source, pos, char, line, col, prev) + elif char == '"': + if (char == char_at(body, pos + 1) == + char_at(body, pos + 2)): + return read_block_string(source, pos, line, col, prev) + return read_string(source, pos, line, col, prev) + + raise GraphQLSyntaxError( + source, pos, unexpected_character_message(char)) + + def position_after_whitespace(self, body, start_position: int) -> int: + """Go to next position after a whitespace. + + Reads from body starting at startPosition until it finds a + non-whitespace or commented character, then returns the position + of that character for lexing. + + """ + body_length = len(body) + position = start_position + while position < body_length: + char = char_at(body, position) + if char is not None and char in ' \t,\ufeff': + position += 1 + elif char == '\n': + position += 1 + self.line += 1 + self.line_start = position + elif char == '\r': + if char_at(body, position + 1) == '\n': + position += 2 + else: + position += 1 + self.line += 1 + self.line_start = position + else: + break + return position + + +def unexpected_character_message(char): + if char < ' ' and char not in '\t\n\r': + return f'Cannot contain the invalid character {print_char(char)}.' + if char == "'": + return ("Unexpected single quote character (')," + ' did you mean to use a double quote (")?') + return f'Cannot parse the unexpected character {print_char(char)}.' + + +def read_comment(source: Source, start, line, col, prev) -> Token: + """Read a comment token from the source file.""" + body = source.body + position = start + while True: + position += 1 + char = char_at(body, position) + if char is None or (char < ' ' and char != '\t'): + break + return Token(TokenKind.COMMENT, start, position, line, col, prev, + body[start + 1:position]) + + +def read_number(source: Source, start, char, line, col, prev) -> Token: + """Reads a number token from the source file. + + Either a float or an int depending on whether a decimal point appears. 
+ + """ + body = source.body + position = start + is_float = False + if char == '-': + position += 1 + char = char_at(body, position) + if char == '0': + position += 1 + char = char_at(body, position) + if char is not None and '0' <= char <= '9': + raise GraphQLSyntaxError( + source, position, 'Invalid number,' + f' unexpected digit after 0: {print_char(char)}.') + else: + position = read_digits(source, position, char) + char = char_at(body, position) + if char == '.': + is_float = True + position += 1 + char = char_at(body, position) + position = read_digits(source, position, char) + char = char_at(body, position) + if char is not None and char in 'Ee': + is_float = True + position += 1 + char = char_at(body, position) + if char is not None and char in '+-': + position += 1 + char = char_at(body, position) + position = read_digits(source, position, char) + return Token(TokenKind.FLOAT if is_float else TokenKind.INT, + start, position, line, col, prev, body[start:position]) + + +def read_digits(source: Source, start, char) -> int: + """Return the new position in the source after reading digits.""" + body = source.body + position = start + while char is not None and '0' <= char <= '9': + position += 1 + char = char_at(body, position) + if position == start: + raise GraphQLSyntaxError( + source, position, + f'Invalid number, expected digit but got: {print_char(char)}.') + return position + + +_ESCAPED_CHARS = { + '"': '"', + '/': '/', + '\\': '\\', + 'b': '\b', + 'f': '\f', + 'n': '\n', + 'r': '\r', + 't': '\t', +} + + +def read_string(source: Source, start, line, col, prev) -> Token: + """Read a string token from the source file.""" + body = source.body + position = start + 1 + chunk_start = position + value: List[str] = [] + append = value.append + + while position < len(body): + char = char_at(body, position) + if char is None or char in '\n\r': + break + if char == '"': + append(body[chunk_start:position]) + return Token(TokenKind.STRING, start, position + 1, line, col, + prev, ''.join(value)) + if char < ' ' and char != '\t': + raise GraphQLSyntaxError( + source, position, + f'Invalid character within String: {print_char(char)}.') + position += 1 + if char == '\\': + append(body[chunk_start:position - 1]) + char = char_at(body, position) + escaped = _ESCAPED_CHARS.get(char) + if escaped: + value.append(escaped) + elif char == 'u': + code = uni_char_code( + char_at(body, position + 1), + char_at(body, position + 2), + char_at(body, position + 3), + char_at(body, position + 4)) + if code < 0: + escape = repr(body[position:position + 5]) + escape = escape[:1] + '\\' + escape[1:] + raise GraphQLSyntaxError( + source, position, + f'Invalid character escape sequence: {escape}.') + append(chr(code)) + position += 4 + else: + escape = repr(char) + escape = escape[:1] + '\\' + escape[1:] + raise GraphQLSyntaxError( + source, position, + f'Invalid character escape sequence: {escape}.') + position += 1 + chunk_start = position + + raise GraphQLSyntaxError( + source, position, 'Unterminated string.') + + +def read_block_string(source: Source, start, line, col, prev) -> Token: + body = source.body + position = start + 3 + chunk_start = position + raw_value = '' + + while position < len(body): + char = char_at(body, position) + if char is None: + break + if (char == '"' and char_at(body, position + 1) == '"' + and char_at(body, position + 2) == '"'): + raw_value += body[chunk_start:position] + return Token(TokenKind.BLOCK_STRING, start, position + 3, + line, col, prev, block_string_value(raw_value)) 
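+ # Unlike single-line strings, block strings may span multiple lines,
+ # so tab, newline and carriage return are allowed here and only other
+ # control characters are rejected.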
+ if char < ' ' and char not in '\t\n\r': + raise GraphQLSyntaxError( + source, position, + f'Invalid character within String: {print_char(char)}.') + if (char == '\\' and char_at(body, position + 1) == '"' + and char_at(body, position + 2) == '"' + and char_at(body, position + 3) == '"'): + raw_value += body[chunk_start:position] + '"""' + position += 4 + chunk_start = position + else: + position += 1 + + raise GraphQLSyntaxError(source, position, 'Unterminated string.') + + +def uni_char_code(a, b, c, d): + """Convert unicode characters to integers. + + Converts four hexadecimal chars to the integer that the + string represents. For example, uni_char_code('0','0','0','f') + will return 15, and uni_char_code('0','0','f','f') returns 255. + + Returns a negative number on error, if a char was invalid. + + This is implemented by noting that char2hex() returns -1 on error, + which means the result of ORing the char2hex() will also be negative. + """ + return (char2hex(a) << 12 | char2hex(b) << 8 | + char2hex(c) << 4 | char2hex(d)) + + +def char2hex(a): + """Convert a hex character to its integer value. + + '0' becomes 0, '9' becomes 9 + 'A' becomes 10, 'F' becomes 15 + 'a' becomes 10, 'f' becomes 15 + + Returns -1 on error. + + """ + if '0' <= a <= '9': + return ord(a) - 48 + elif 'A' <= a <= 'F': + return ord(a) - 55 + elif 'a' <= a <= 'f': # a-f + return ord(a) - 87 + return -1 + + +def read_name(source: Source, start, line, col, prev) -> Token: + """Read an alphanumeric + underscore name from the source.""" + body = source.body + body_length = len(body) + position = start + 1 + while position < body_length: + char = char_at(body, position) + if char is None or not ( + char == '_' or '0' <= char <= '9' or + 'A' <= char <= 'Z' or 'a' <= char <= 'z'): + break + position += 1 + return Token(TokenKind.NAME, start, position, line, col, + prev, body[start:position]) diff --git a/graphql/language/location.py b/graphql/language/location.py new file mode 100644 index 00000000..729d5453 --- /dev/null +++ b/graphql/language/location.py @@ -0,0 +1,21 @@ +from typing import NamedTuple, TYPE_CHECKING + +if TYPE_CHECKING: + from .source import Source # noqa: F401 + +__all__ = ['get_location', 'SourceLocation'] + + +class SourceLocation(NamedTuple): + """Represents a location in a Source.""" + line: int + column: int + + +def get_location(source: 'Source', position: int) -> SourceLocation: + """Get the line and column for a character position in the source. + + Takes a Source and a UTF-8 character offset, and returns the corresponding + line and column as a SourceLocation. 
+ """ + return source.get_location(position) diff --git a/graphql/language/parser.py b/graphql/language/parser.py new file mode 100644 index 00000000..031e767b --- /dev/null +++ b/graphql/language/parser.py @@ -0,0 +1,969 @@ +from typing import Callable, List, Optional, Union, cast, Dict + +from .ast import ( + ArgumentNode, BooleanValueNode, DefinitionNode, + DirectiveDefinitionNode, DirectiveNode, DocumentNode, + EnumTypeDefinitionNode, EnumTypeExtensionNode, EnumValueDefinitionNode, + EnumValueNode, ExecutableDefinitionNode, FieldDefinitionNode, FieldNode, + FloatValueNode, FragmentDefinitionNode, FragmentSpreadNode, + InlineFragmentNode, InputObjectTypeDefinitionNode, + InputObjectTypeExtensionNode, InputValueDefinitionNode, IntValueNode, + InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, ListTypeNode, + ListValueNode, Location, NameNode, NamedTypeNode, Node, NonNullTypeNode, + NullValueNode, ObjectFieldNode, ObjectTypeDefinitionNode, + ObjectTypeExtensionNode, ObjectValueNode, OperationDefinitionNode, + OperationType, OperationTypeDefinitionNode, ScalarTypeDefinitionNode, + ScalarTypeExtensionNode, SchemaDefinitionNode, SchemaExtensionNode, + SelectionNode, SelectionSetNode, StringValueNode, + TypeNode, TypeSystemDefinitionNode, TypeSystemExtensionNode, + UnionTypeDefinitionNode, UnionTypeExtensionNode, ValueNode, + VariableDefinitionNode, VariableNode) +from .directive_locations import DirectiveLocation +from .lexer import Lexer, Token, TokenKind +from .source import Source +from ..error import GraphQLError, GraphQLSyntaxError + +__all__ = ['parse', 'parse_type', 'parse_value'] + +SourceType = Union[Source, str] + + +def parse(source: SourceType, + no_location=False, + experimental_fragment_variables=False) -> DocumentNode: + """Given a GraphQL source, parse it into a Document. + + Throws GraphQLError if a syntax error is encountered. + + By default, the parser creates AST nodes that know the location + in the source that they correspond to. The `no_location` option + disables that behavior for performance or testing. + """ + if isinstance(source, str): + source = Source(source) + elif not isinstance(source, Source): + raise TypeError(f'Must provide Source. Received: {source!r}') + lexer = Lexer( + source, no_location=no_location, + experimental_fragment_variables=experimental_fragment_variables) + return parse_document(lexer) + + +def parse_value(source: SourceType, **options: dict) -> ValueNode: + """Parse the AST for a given string containing a GraphQL value. + + Throws GraphQLError if a syntax error is encountered. + + This is useful within tools that operate upon GraphQL Values directly and + in isolation of complete GraphQL documents. + + Consider providing the results to the utility function: value_from_ast(). + """ + if isinstance(source, str): + source = Source(source) + lexer = Lexer(source, **options) + expect(lexer, TokenKind.SOF) + value = parse_value_literal(lexer, False) + expect(lexer, TokenKind.EOF) + return value + + +def parse_type(source: SourceType, **options: dict) -> TypeNode: + """Parse the AST for a given string containing a GraphQL Type. + + Throws GraphQLError if a syntax error is encountered. + + This is useful within tools that operate upon GraphQL Types directly and + in isolation of complete GraphQL documents. + + Consider providing the results to the utility function: type_from_ast(). 
+ """ + if isinstance(source, str): + source = Source(source) + lexer = Lexer(source, **options) + expect(lexer, TokenKind.SOF) + type_ = parse_type_reference(lexer) + expect(lexer, TokenKind.EOF) + return type_ + + +def parse_name(lexer: Lexer) -> NameNode: + """Convert a name lex token into a name parse node.""" + token = expect(lexer, TokenKind.NAME) + return NameNode(value=token.value, loc=loc(lexer, token)) + + +def parse_document(lexer: Lexer) -> DocumentNode: + """Document: Definition+""" + start = lexer.token + expect(lexer, TokenKind.SOF) + definitions: List[DefinitionNode] = [] + append = definitions.append + while True: + append(parse_definition(lexer)) + if skip(lexer, TokenKind.EOF): + break + return DocumentNode(definitions=definitions, loc=loc(lexer, start)) + + +def parse_definition(lexer: Lexer) -> DefinitionNode: + """Definition: ExecutableDefinition or TypeSystemDefinition""" + if peek(lexer, TokenKind.NAME): + func = _parse_definition_functions.get(cast(str, lexer.token.value)) + if func: + return func(lexer) + elif peek(lexer, TokenKind.BRACE_L): + return parse_executable_definition(lexer) + elif peek_description(lexer): + return parse_type_system_definition(lexer) + raise unexpected(lexer) + + +def parse_executable_definition(lexer: Lexer) -> ExecutableDefinitionNode: + """ExecutableDefinition: OperationDefinition or FragmentDefinition""" + if peek(lexer, TokenKind.NAME): + func = _parse_executable_definition_functions.get( + cast(str, lexer.token.value)) + if func: + return func(lexer) + elif peek(lexer, TokenKind.BRACE_L): + return parse_operation_definition(lexer) + raise unexpected(lexer) + + +# Implement the parsing rules in the Operations section. + +def parse_operation_definition(lexer: Lexer) -> OperationDefinitionNode: + """OperationDefinition""" + start = lexer.token + if peek(lexer, TokenKind.BRACE_L): + return OperationDefinitionNode( + operation=OperationType.QUERY, name=None, + variable_definitions=[], directives=[], + selection_set=parse_selection_set(lexer), + loc=loc(lexer, start)) + operation = parse_operation_type(lexer) + name = parse_name(lexer) if peek(lexer, TokenKind.NAME) else None + return OperationDefinitionNode( + operation=operation, name=name, + variable_definitions=parse_variable_definitions(lexer), + directives=parse_directives(lexer, False), + selection_set=parse_selection_set(lexer), + loc=loc(lexer, start)) + + +def parse_operation_type(lexer: Lexer) -> OperationType: + """OperationType: one of query mutation subscription""" + operation_token = expect(lexer, TokenKind.NAME) + try: + return OperationType(operation_token.value) + except ValueError: + raise unexpected(lexer, operation_token) + + +def parse_variable_definitions(lexer: Lexer) -> List[VariableDefinitionNode]: + """VariableDefinitions: (VariableDefinition+)""" + return cast(List[VariableDefinitionNode], many_nodes( + lexer, TokenKind.PAREN_L, parse_variable_definition, TokenKind.PAREN_R + )) if peek(lexer, TokenKind.PAREN_L) else [] + + +def parse_variable_definition(lexer: Lexer) -> VariableDefinitionNode: + """VariableDefinition: Variable: Type DefaultValue?""" + start = lexer.token + return VariableDefinitionNode( + variable=parse_variable(lexer), + type=expect(lexer, TokenKind.COLON) and parse_type_reference(lexer), + default_value=parse_value_literal(lexer, True) + if skip(lexer, TokenKind.EQUALS) else None, + loc=loc(lexer, start)) + + +def parse_variable(lexer: Lexer) -> VariableNode: + """Variable: $Name""" + start = lexer.token + expect(lexer, TokenKind.DOLLAR) + 
return VariableNode(name=parse_name(lexer), loc=loc(lexer, start)) + + +def parse_selection_set(lexer: Lexer) -> SelectionSetNode: + """SelectionSet: {Selection+}""" + start = lexer.token + return SelectionSetNode( + selections=many_nodes( + lexer, TokenKind.BRACE_L, parse_selection, TokenKind.BRACE_R), + loc=loc(lexer, start)) + + +def parse_selection(lexer: Lexer) -> SelectionNode: + """Selection: Field or FragmentSpread or InlineFragment""" + return (parse_fragment if peek(lexer, TokenKind.SPREAD) + else parse_field)(lexer) + + +def parse_field(lexer: Lexer) -> FieldNode: + """Field: Alias? Name Arguments? Directives? SelectionSet?""" + start = lexer.token + name_or_alias = parse_name(lexer) + if skip(lexer, TokenKind.COLON): + alias: Optional[NameNode] = name_or_alias + name = parse_name(lexer) + else: + alias = None + name = name_or_alias + return FieldNode( + alias=alias, name=name, + arguments=parse_arguments(lexer, False), + directives=parse_directives(lexer, False), + selection_set=parse_selection_set(lexer) + if peek(lexer, TokenKind.BRACE_L) else None, + loc=loc(lexer, start)) + + +def parse_arguments(lexer: Lexer, is_const: bool) -> List[ArgumentNode]: + """Arguments[Const]: (Argument[?Const]+)""" + item = parse_const_argument if is_const else parse_argument + return cast(List[ArgumentNode], many_nodes( + lexer, TokenKind.PAREN_L, item, + TokenKind.PAREN_R)) if peek(lexer, TokenKind.PAREN_L) else [] + + +def parse_argument(lexer: Lexer) -> ArgumentNode: + """Argument: Name : Value""" + start = lexer.token + return ArgumentNode( + name=parse_name(lexer), + value=expect(lexer, TokenKind.COLON) and + parse_value_literal(lexer, False), + loc=loc(lexer, start)) + + +def parse_const_argument(lexer: Lexer) -> ArgumentNode: + """Argument[Const]: Name : Value[?Const]""" + start = lexer.token + return ArgumentNode( + name=parse_name(lexer), + value=expect(lexer, TokenKind.COLON) + and parse_const_value(lexer), + loc=loc(lexer, start)) + + +# Implement the parsing rules in the Fragments section. + +def parse_fragment( + lexer: Lexer) -> Union[FragmentSpreadNode, InlineFragmentNode]: + """Corresponds to both FragmentSpread and InlineFragment in the spec. + + FragmentSpread: ... FragmentName Directives? + InlineFragment: ... TypeCondition? Directives? 
SelectionSet + """ + start = lexer.token + expect(lexer, TokenKind.SPREAD) + if peek(lexer, TokenKind.NAME) and lexer.token.value != 'on': + return FragmentSpreadNode( + name=parse_fragment_name(lexer), + directives=parse_directives(lexer, False), + loc=loc(lexer, start)) + if lexer.token.value == 'on': + lexer.advance() + type_condition: Optional[NamedTypeNode] = parse_named_type(lexer) + else: + type_condition = None + return InlineFragmentNode( + type_condition=type_condition, + directives=parse_directives(lexer, False), + selection_set=parse_selection_set(lexer), + loc=loc(lexer, start)) + + +def parse_fragment_definition(lexer: Lexer) -> FragmentDefinitionNode: + """FragmentDefinition""" + start = lexer.token + expect_keyword(lexer, 'fragment') + # Experimental support for defining variables within fragments changes + # the grammar of FragmentDefinition + if lexer.experimental_fragment_variables: + return FragmentDefinitionNode( + name=parse_fragment_name(lexer), + variable_definitions=parse_variable_definitions(lexer), + type_condition=expect_keyword(lexer, 'on') and + parse_named_type(lexer), + directives=parse_directives(lexer, False), + selection_set=parse_selection_set(lexer), + loc=loc(lexer, start)) + return FragmentDefinitionNode( + name=parse_fragment_name(lexer), + type_condition=expect_keyword(lexer, 'on') and + parse_named_type(lexer), + directives=parse_directives(lexer, False), + selection_set=parse_selection_set(lexer), + loc=loc(lexer, start)) + + +_parse_executable_definition_functions: Dict[str, Callable] = {**dict.fromkeys( + ('query', 'mutation', 'subscription'), + parse_operation_definition), **dict.fromkeys( + ('fragment',), parse_fragment_definition)} + + +def parse_fragment_name(lexer: Lexer) -> NameNode: + """FragmentName: Name but not `on`""" + if lexer.token.value == 'on': + raise unexpected(lexer) + return parse_name(lexer) + + +# Implements the parsing rules in the Values section. 
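+ # A rough illustration of the value literals handled by the parsers in
+ # this section (see the _parse_value_literal_functions table below):
+ #   [1, 2.5, "three"]    -> ListValueNode of Int/Float/StringValueNodes
+ #   {answer: 42}         -> ObjectValueNode with one ObjectFieldNode
+ #   true / false / null  -> BooleanValueNode / NullValueNode
+ #   RED                  -> EnumValueNode
+ #   $episode             -> VariableNode (rejected when parsing consts)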
+ +def parse_value_literal(lexer: Lexer, is_const: bool) -> ValueNode: + func = _parse_value_literal_functions.get(lexer.token.kind) + if func: + return func(lexer, is_const) # type: ignore + raise unexpected(lexer) + + +def parse_string_literal(lexer: Lexer, _is_const=True) -> StringValueNode: + token = lexer.token + lexer.advance() + return StringValueNode( + value=token.value, + block=token.kind == TokenKind.BLOCK_STRING, + loc=loc(lexer, token)) + + +def parse_const_value(lexer: Lexer) -> ValueNode: + return parse_value_literal(lexer, True) + + +def parse_value_value(lexer: Lexer) -> ValueNode: + return parse_value_literal(lexer, False) + + +def parse_list(lexer: Lexer, is_const: bool) -> ListValueNode: + """ListValue[Const]""" + start = lexer.token + item = parse_const_value if is_const else parse_value_value + return ListValueNode( + values=any_nodes( + lexer, TokenKind.BRACKET_L, item, TokenKind.BRACKET_R), + loc=loc(lexer, start)) + + +def parse_object_field(lexer: Lexer, is_const: bool) -> ObjectFieldNode: + start = lexer.token + return ObjectFieldNode( + name=parse_name(lexer), + value=expect(lexer, TokenKind.COLON) and + parse_value_literal(lexer, is_const), + loc=loc(lexer, start)) + + +def parse_object(lexer: Lexer, is_const: bool) -> ObjectValueNode: + """ObjectValue[Const]""" + start = lexer.token + expect(lexer, TokenKind.BRACE_L) + fields: List[ObjectFieldNode] = [] + append = fields.append + while not skip(lexer, TokenKind.BRACE_R): + append(parse_object_field(lexer, is_const)) + return ObjectValueNode(fields=fields, loc=loc(lexer, start)) + + +def parse_int(lexer: Lexer, _is_const=True) -> IntValueNode: + token = lexer.token + lexer.advance() + return IntValueNode(value=token.value, loc=loc(lexer, token)) + + +def parse_float(lexer: Lexer, _is_const=True) -> FloatValueNode: + token = lexer.token + lexer.advance() + return FloatValueNode(value=token.value, loc=loc(lexer, token)) + + +def parse_named_values(lexer: Lexer, _is_const=True) -> ValueNode: + token = lexer.token + value = token.value + lexer.advance() + if value in ('true', 'false'): + return BooleanValueNode(value=value == 'true', loc=loc(lexer, token)) + elif value == 'null': + return NullValueNode(loc=loc(lexer, token)) + else: + return EnumValueNode(value=value, loc=loc(lexer, token)) + + +def parse_variable_value(lexer: Lexer, is_const) -> VariableNode: + if not is_const: + return parse_variable(lexer) + raise unexpected(lexer) + + +_parse_value_literal_functions = { + TokenKind.BRACKET_L: parse_list, + TokenKind.BRACE_L: parse_object, + TokenKind.INT: parse_int, + TokenKind.FLOAT: parse_float, + TokenKind.STRING: parse_string_literal, + TokenKind.BLOCK_STRING: parse_string_literal, + TokenKind.NAME: parse_named_values, + TokenKind.DOLLAR: parse_variable_value} + + +# Implement the parsing rules in the Directives section. + +def parse_directives(lexer: Lexer, is_const: bool) -> List[DirectiveNode]: + """Directives[Const]: Directive[?Const]+""" + directives: List[DirectiveNode] = [] + append = directives.append + while peek(lexer, TokenKind.AT): + append(parse_directive(lexer, is_const)) + return directives + + +def parse_directive(lexer: Lexer, is_const: bool) -> DirectiveNode: + """Directive[Const]: @ Name Arguments[?Const]?""" + start = lexer.token + expect(lexer, TokenKind.AT) + return DirectiveNode( + name=parse_name(lexer), + arguments=parse_arguments(lexer, is_const), + loc=loc(lexer, start)) + + +# Implement the parsing rules in the Types section. 
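# A minimal sketch of how a wrapped type reference nests, again assuming the
# AST node constructors accept their attributes as keyword arguments: the
# reference "[String!]!" handled by `parse_type_reference` corresponds to a
# NonNullTypeNode around a ListTypeNode around a NonNullTypeNode around a
# NamedTypeNode:

from graphql.language.ast import (
    ListTypeNode, NamedTypeNode, NameNode, NonNullTypeNode)

string_ref = NamedTypeNode(name=NameNode(value='String'))
non_null_list_of_strings = NonNullTypeNode(
    type=ListTypeNode(type=NonNullTypeNode(type=string_ref)))
assert non_null_list_of_strings.type.type.type.name.value == 'String'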
+ +def parse_type_reference(lexer: Lexer) -> TypeNode: + """Type: NamedType or ListType or NonNullType""" + start = lexer.token + if skip(lexer, TokenKind.BRACKET_L): + type_ = parse_type_reference(lexer) + expect(lexer, TokenKind.BRACKET_R) + type_ = ListTypeNode(type=type_, loc=loc(lexer, start)) + else: + type_ = parse_named_type(lexer) + if skip(lexer, TokenKind.BANG): + return NonNullTypeNode(type=type_, loc=loc(lexer, start)) + return type_ + + +def parse_named_type(lexer: Lexer) -> NamedTypeNode: + """NamedType: Name""" + start = lexer.token + return NamedTypeNode(name=parse_name(lexer), loc=loc(lexer, start)) + + +# Implement the parsing rules in the Type Definition section. + +def parse_type_system_definition(lexer: Lexer) -> TypeSystemDefinitionNode: + """TypeSystemDefinition""" + # Many definitions begin with a description and require a lookahead. + keyword_token = lexer.lookahead( + ) if peek_description(lexer) else lexer.token + func = _parse_type_system_definition_functions.get(keyword_token.value) + if func: + return func(lexer) + raise unexpected(lexer, keyword_token) + + +def parse_type_system_extension(lexer: Lexer) -> TypeSystemExtensionNode: + """TypeSystemExtension""" + keyword_token = lexer.lookahead() + if keyword_token.kind == TokenKind.NAME: + func = _parse_type_extension_functions.get(keyword_token.value) + if func: + return func(lexer) + raise unexpected(lexer, keyword_token) + + +_parse_definition_functions: Dict[str, Callable] = {**dict.fromkeys( + ('query', 'mutation', 'subscription', 'fragment'), + parse_executable_definition), **dict.fromkeys( + ('schema', 'scalar', 'type', 'interface', 'union', 'enum', + 'input', 'directive'), parse_type_system_definition), + 'extend': parse_type_system_extension} + + +def peek_description(lexer: Lexer) -> bool: + return peek(lexer, TokenKind.STRING) or peek(lexer, TokenKind.BLOCK_STRING) + + +def parse_description(lexer: Lexer) -> Optional[StringValueNode]: + """Description: StringValue""" + if peek_description(lexer): + return parse_string_literal(lexer) + return None + + +def parse_schema_definition(lexer: Lexer) -> SchemaDefinitionNode: + """SchemaDefinition""" + start = lexer.token + expect_keyword(lexer, 'schema') + directives = parse_directives(lexer, True) + operation_types = many_nodes( + lexer, TokenKind.BRACE_L, + parse_operation_type_definition, TokenKind.BRACE_R) + return SchemaDefinitionNode( + directives=directives, operation_types=operation_types, + loc=loc(lexer, start)) + + +def parse_operation_type_definition( + lexer: Lexer) -> OperationTypeDefinitionNode: + """OperationTypeDefinition: OperationType : NamedType""" + start = lexer.token + operation = parse_operation_type(lexer) + expect(lexer, TokenKind.COLON) + type_ = parse_named_type(lexer) + return OperationTypeDefinitionNode( + operation=operation, type=type_, loc=loc(lexer, start)) + + +def parse_scalar_type_definition(lexer: Lexer) -> ScalarTypeDefinitionNode: + """ScalarTypeDefinition: Description? 
scalar Name Directives[Const]?""" + start = lexer.token + description = parse_description(lexer) + expect_keyword(lexer, 'scalar') + name = parse_name(lexer) + directives = parse_directives(lexer, True) + return ScalarTypeDefinitionNode( + description=description, name=name, directives=directives, + loc=loc(lexer, start)) + + +def parse_object_type_definition(lexer: Lexer) -> ObjectTypeDefinitionNode: + """ObjectTypeDefinition""" + start = lexer.token + description = parse_description(lexer) + expect_keyword(lexer, 'type') + name = parse_name(lexer) + interfaces = parse_implements_interfaces(lexer) + directives = parse_directives(lexer, True) + fields = parse_fields_definition(lexer) + return ObjectTypeDefinitionNode( + description=description, name=name, interfaces=interfaces, + directives=directives, fields=fields, loc=loc(lexer, start)) + + +def parse_implements_interfaces(lexer: Lexer) -> List[NamedTypeNode]: + """ImplementsInterfaces""" + types: List[NamedTypeNode] = [] + if lexer.token.value == 'implements': + lexer.advance() + # optional leading ampersand + skip(lexer, TokenKind.AMP) + append = types.append + while True: + append(parse_named_type(lexer)) + if not skip(lexer, TokenKind.AMP): + break + return types + + +def parse_fields_definition(lexer: Lexer) -> List[FieldDefinitionNode]: + """FieldsDefinition: {FieldDefinition+}""" + return cast(List[FieldDefinitionNode], many_nodes( + lexer, TokenKind.BRACE_L, parse_field_definition, + TokenKind.BRACE_R)) if peek(lexer, TokenKind.BRACE_L) else [] + + +def parse_field_definition(lexer: Lexer) -> FieldDefinitionNode: + """FieldDefinition""" + start = lexer.token + description = parse_description(lexer) + name = parse_name(lexer) + args = parse_argument_defs(lexer) + expect(lexer, TokenKind.COLON) + type_ = parse_type_reference(lexer) + directives = parse_directives(lexer, True) + return FieldDefinitionNode( + description=description, name=name, arguments=args, type=type_, + directives=directives, loc=loc(lexer, start)) + + +def parse_argument_defs(lexer: Lexer) -> List[InputValueDefinitionNode]: + """ArgumentsDefinition: (InputValueDefinition+)""" + return cast(List[InputValueDefinitionNode], many_nodes( + lexer, TokenKind.PAREN_L, parse_input_value_def, + TokenKind.PAREN_R)) if peek(lexer, TokenKind.PAREN_L) else [] + + +def parse_input_value_def(lexer: Lexer) -> InputValueDefinitionNode: + """InputValueDefinition""" + start = lexer.token + description = parse_description(lexer) + name = parse_name(lexer) + expect(lexer, TokenKind.COLON) + type_ = parse_type_reference(lexer) + default_value = parse_const_value(lexer) if skip( + lexer, TokenKind.EQUALS) else None + directives = parse_directives(lexer, True) + return InputValueDefinitionNode( + description=description, name=name, type=type_, + default_value=default_value, directives=directives, + loc=loc(lexer, start)) + + +def parse_interface_type_definition( + lexer: Lexer) -> InterfaceTypeDefinitionNode: + """InterfaceTypeDefinition""" + start = lexer.token + description = parse_description(lexer) + expect_keyword(lexer, 'interface') + name = parse_name(lexer) + directives = parse_directives(lexer, True) + fields = parse_fields_definition(lexer) + return InterfaceTypeDefinitionNode( + description=description, name=name, directives=directives, + fields=fields, loc=loc(lexer, start)) + + +def parse_union_type_definition(lexer: Lexer) -> UnionTypeDefinitionNode: + """UnionTypeDefinition""" + start = lexer.token + description = parse_description(lexer) + expect_keyword(lexer, 'union') 
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    types = parse_union_member_types(lexer)
+    return UnionTypeDefinitionNode(
+        description=description, name=name, directives=directives, types=types,
+        loc=loc(lexer, start))
+
+
+def parse_union_member_types(lexer: Lexer) -> List[NamedTypeNode]:
+    """UnionMemberTypes"""
+    types: List[NamedTypeNode] = []
+    if skip(lexer, TokenKind.EQUALS):
+        # optional leading pipe
+        skip(lexer, TokenKind.PIPE)
+        append = types.append
+        while True:
+            append(parse_named_type(lexer))
+            if not skip(lexer, TokenKind.PIPE):
+                break
+    return types
+
+
+def parse_enum_type_definition(lexer: Lexer) -> EnumTypeDefinitionNode:
+    """EnumTypeDefinition"""
+    start = lexer.token
+    description = parse_description(lexer)
+    expect_keyword(lexer, 'enum')
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    values = parse_enum_values_definition(lexer)
+    return EnumTypeDefinitionNode(
+        description=description, name=name, directives=directives,
+        values=values, loc=loc(lexer, start))
+
+
+def parse_enum_values_definition(
+        lexer: Lexer) -> List[EnumValueDefinitionNode]:
+    """EnumValuesDefinition: {EnumValueDefinition+}"""
+    return cast(List[EnumValueDefinitionNode], many_nodes(
+        lexer, TokenKind.BRACE_L, parse_enum_value_definition,
+        TokenKind.BRACE_R)) if peek(lexer, TokenKind.BRACE_L) else []
+
+
+def parse_enum_value_definition(lexer: Lexer) -> EnumValueDefinitionNode:
+    """EnumValueDefinition: Description? EnumValue Directives[Const]?"""
+    start = lexer.token
+    description = parse_description(lexer)
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    return EnumValueDefinitionNode(
+        description=description, name=name, directives=directives,
+        loc=loc(lexer, start))
+
+
+def parse_input_object_type_definition(
+        lexer: Lexer) -> InputObjectTypeDefinitionNode:
+    """InputObjectTypeDefinition"""
+    start = lexer.token
+    description = parse_description(lexer)
+    expect_keyword(lexer, 'input')
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    fields = parse_input_fields_definition(lexer)
+    return InputObjectTypeDefinitionNode(
+        description=description, name=name, directives=directives,
+        fields=fields, loc=loc(lexer, start))
+
+
+def parse_input_fields_definition(
+        lexer: Lexer) -> List[InputValueDefinitionNode]:
+    """InputFieldsDefinition: {InputValueDefinition+}"""
+    return cast(List[InputValueDefinitionNode], many_nodes(
+        lexer, TokenKind.BRACE_L, parse_input_value_def,
+        TokenKind.BRACE_R)) if peek(lexer, TokenKind.BRACE_L) else []
+
+
+def parse_schema_extension(lexer: Lexer) -> SchemaExtensionNode:
+    """SchemaExtension"""
+    start = lexer.token
+    expect_keyword(lexer, 'extend')
+    expect_keyword(lexer, 'schema')
+    directives = parse_directives(lexer, True)
+    operation_types = many_nodes(
+        lexer, TokenKind.BRACE_L, parse_operation_type_definition,
+        TokenKind.BRACE_R) if peek(lexer, TokenKind.BRACE_L) else []
+    if not directives and not operation_types:
+        raise unexpected(lexer)
+    return SchemaExtensionNode(
+        directives=directives, operation_types=operation_types,
+        loc=loc(lexer, start))
+
+
+def parse_scalar_type_extension(lexer: Lexer) -> ScalarTypeExtensionNode:
+    """ScalarTypeExtension"""
+    start = lexer.token
+    expect_keyword(lexer, 'extend')
+    expect_keyword(lexer, 'scalar')
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    if not directives:
+        raise unexpected(lexer)
+    return ScalarTypeExtensionNode(
+        name=name, directives=directives, loc=loc(lexer, start))
+
+
+def parse_object_type_extension(lexer: Lexer) -> ObjectTypeExtensionNode:
+    """ObjectTypeExtension"""
+    start = lexer.token
+    expect_keyword(lexer, 'extend')
+    expect_keyword(lexer, 'type')
+    name = parse_name(lexer)
+    interfaces = parse_implements_interfaces(lexer)
+    directives = parse_directives(lexer, True)
+    fields = parse_fields_definition(lexer)
+    if not (interfaces or directives or fields):
+        raise unexpected(lexer)
+    return ObjectTypeExtensionNode(
+        name=name, interfaces=interfaces, directives=directives, fields=fields,
+        loc=loc(lexer, start))
+
+
+def parse_interface_type_extension(lexer: Lexer) -> InterfaceTypeExtensionNode:
+    """InterfaceTypeExtension"""
+    start = lexer.token
+    expect_keyword(lexer, 'extend')
+    expect_keyword(lexer, 'interface')
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    fields = parse_fields_definition(lexer)
+    if not (directives or fields):
+        raise unexpected(lexer)
+    return InterfaceTypeExtensionNode(
+        name=name, directives=directives, fields=fields, loc=loc(lexer, start))
+
+
+def parse_union_type_extension(lexer: Lexer) -> UnionTypeExtensionNode:
+    """UnionTypeExtension"""
+    start = lexer.token
+    expect_keyword(lexer, 'extend')
+    expect_keyword(lexer, 'union')
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    types = parse_union_member_types(lexer)
+    if not (directives or types):
+        raise unexpected(lexer)
+    return UnionTypeExtensionNode(
+        name=name, directives=directives, types=types, loc=loc(lexer, start))
+
+
+def parse_enum_type_extension(lexer: Lexer) -> EnumTypeExtensionNode:
+    """EnumTypeExtension"""
+    start = lexer.token
+    expect_keyword(lexer, 'extend')
+    expect_keyword(lexer, 'enum')
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    values = parse_enum_values_definition(lexer)
+    if not (directives or values):
+        raise unexpected(lexer)
+    return EnumTypeExtensionNode(
+        name=name, directives=directives, values=values, loc=loc(lexer, start))
+
+
+def parse_input_object_type_extension(
+        lexer: Lexer) -> InputObjectTypeExtensionNode:
+    """InputObjectTypeExtension"""
+    start = lexer.token
+    expect_keyword(lexer, 'extend')
+    expect_keyword(lexer, 'input')
+    name = parse_name(lexer)
+    directives = parse_directives(lexer, True)
+    fields = parse_input_fields_definition(lexer)
+    if not (directives or fields):
+        raise unexpected(lexer)
+    return InputObjectTypeExtensionNode(
+        name=name, directives=directives, fields=fields, loc=loc(lexer, start))
+
+
+_parse_type_extension_functions: Dict[
+    str, Callable[[Lexer], TypeSystemExtensionNode]] = {
+    'schema': parse_schema_extension,
+    'scalar': parse_scalar_type_extension,
+    'type': parse_object_type_extension,
+    'interface': parse_interface_type_extension,
+    'union': parse_union_type_extension,
+    'enum': parse_enum_type_extension,
+    'input': parse_input_object_type_extension
+}
+
+
+def parse_directive_definition(lexer: Lexer) -> DirectiveDefinitionNode:
+    """DirectiveDefinition"""
+    start = lexer.token
+    description = parse_description(lexer)
+    expect_keyword(lexer, 'directive')
+    expect(lexer, TokenKind.AT)
+    name = parse_name(lexer)
+    args = parse_argument_defs(lexer)
+    expect_keyword(lexer, 'on')
+    locations = parse_directive_locations(lexer)
+    return DirectiveDefinitionNode(
+        description=description, name=name, arguments=args,
+        locations=locations, loc=loc(lexer, start))
+
+
+_parse_type_system_definition_functions = {
+    'schema': parse_schema_definition,
+    'scalar': parse_scalar_type_definition,
+    'type': parse_object_type_definition,
+    'interface': parse_interface_type_definition,
+    'union': parse_union_type_definition,
+    'enum': parse_enum_type_definition,
+    'input': parse_input_object_type_definition,
+    'directive': parse_directive_definition
+}
+
+
+def parse_directive_locations(lexer: Lexer) -> List[NameNode]:
+    """DirectiveLocations"""
+    # optional leading pipe
+    skip(lexer, TokenKind.PIPE)
+    locations: List[NameNode] = []
+    append = locations.append
+    while True:
+        append(parse_directive_location(lexer))
+        if not skip(lexer, TokenKind.PIPE):
+            break
+    return locations
+
+
+def parse_directive_location(lexer: Lexer) -> NameNode:
+    """DirectiveLocation"""
+    start = lexer.token
+    name = parse_name(lexer)
+    if name.value in DirectiveLocation.__members__:
+        return name
+    raise unexpected(lexer, start)
+
+
+# Core parsing utility functions
+
+def loc(lexer: Lexer, start_token: Token) -> Optional[Location]:
+    """Return a location object.
+
+    Used to identify the place in the source that created
+    a given parsed object.
+    """
+    if not lexer.no_location:
+        end_token = lexer.last_token
+        source = lexer.source
+        return Location(
+            start_token.start, end_token.end, start_token, end_token, source)
+    return None
+
+
+def peek(lexer: Lexer, kind: TokenKind):
+    """Determine if the next token is of a given kind"""
+    return lexer.token.kind == kind
+
+
+def skip(lexer: Lexer, kind: TokenKind) -> bool:
+    """Conditionally skip the next token.
+
+    If the next token is of the given kind, return true after advancing
+    the lexer. Otherwise, do not change the parser state and return false.
+    """
+    match = lexer.token.kind == kind
+    if match:
+        lexer.advance()
+    return match
+
+
+def expect(lexer: Lexer, kind: TokenKind) -> Token:
+    """Check kind of the next token.
+
+    If the next token is of the given kind, return that token after advancing
+    the lexer. Otherwise, do not change the parser state and throw an error.
+    """
+    token = lexer.token
+    if token.kind == kind:
+        lexer.advance()
+        return token
+    raise GraphQLSyntaxError(
+        lexer.source, token.start,
+        f'Expected {kind.value}, found {token.kind.value}')
+
+
+def expect_keyword(lexer: Lexer, value: str) -> Token:
+    """Check next token for given keyword.
+
+    If the next token is a keyword with the given value, return that token
+    after advancing the lexer. Otherwise, do not change the parser state and
+    throw an error.
+    """
+    token = lexer.token
+    if token.kind == TokenKind.NAME and token.value == value:
+        lexer.advance()
+        return token
+    raise GraphQLSyntaxError(
+        lexer.source, token.start,
+        f'Expected {value!r}, found {token.desc}')
+
+
+def unexpected(lexer: Lexer, at_token: Token=None) -> GraphQLError:
+    """Create an error when an unexpected lexed token is encountered."""
+    token = at_token or lexer.token
+    return GraphQLSyntaxError(
+        lexer.source, token.start, f'Unexpected {token.desc}')
+
+
+def any_nodes(lexer: Lexer, open_kind: TokenKind,
+              parse_fn: Callable[[Lexer], Node],
+              close_kind: TokenKind) -> List[Node]:
+    """Fetch any matching nodes, possibly none.
+
+    Returns a possibly empty list of parse nodes, determined by the `parse_fn`.
+    This list begins with a lex token of `open_kind` and ends with a lex token
+    of `close_kind`. Advances the parser to the next lex token after the
+    closing token.
+ """ + expect(lexer, open_kind) + nodes: List[Node] = [] + append = nodes.append + while not skip(lexer, close_kind): + append(parse_fn(lexer)) + return nodes + + +def many_nodes(lexer: Lexer, open_kind: TokenKind, + parse_fn: Callable[[Lexer], Node], + close_kind: TokenKind) -> List[Node]: + """Fetch matching nodes, at least one. + + Returns a non-empty list of parse nodes, determined by the `parse_fn`. + This list begins with a lex token of `open_kind` and ends with a lex token + of `close_kind`. Advances the parser to the next lex token after the + closing token. + """ + expect(lexer, open_kind) + nodes = [parse_fn(lexer)] + append = nodes.append + while not skip(lexer, close_kind): + append(parse_fn(lexer)) + return nodes diff --git a/graphql/language/printer.py b/graphql/language/printer.py new file mode 100644 index 00000000..3f3c9c30 --- /dev/null +++ b/graphql/language/printer.py @@ -0,0 +1,279 @@ +from functools import wraps +from json import dumps +from typing import Optional, Sequence + +from .ast import Node, OperationType +from .visitor import visit, Visitor + +__all__ = ['print_ast'] + + +def print_ast(ast: Node): + """Convert an AST into a string. + + The conversion is done using a set of reasonable formatting rules. + """ + return visit(ast, PrintAstVisitor()) + + +def add_description(method): + """Decorator adding the description to the output of a visitor method.""" + @wraps(method) + def wrapped(self, node, *args): + return join([node.description, method(self, node, *args)], '\n') + return wrapped + + +# noinspection PyMethodMayBeStatic +class PrintAstVisitor(Visitor): + + def leave_name(self, node, *_args): + return node.value + + def leave_variable(self, node, *_args): + return f'${node.name}' + + # Document + + def leave_document(self, node, *_args): + return join(node.definitions, '\n\n') + '\n' + + def leave_operation_definition(self, node, *_args): + name, op, selection_set = node.name, node.operation, node.selection_set + var_defs = wrap('(', join(node.variable_definitions, ', '), ')') + directives = join(node.directives, ' ') + # Anonymous queries with no directives or variable definitions can use + # the query short form. + return join([op.value, join([name, var_defs]), + directives, selection_set], ' ') if ( + name or directives or var_defs or op != OperationType.QUERY + ) else selection_set + + def leave_variable_definition(self, node, *_args): + return f"{node.variable}: {node.type}{wrap(' = ', node.default_value)}" + + def leave_selection_set(self, node, *_args): + return block(node.selections) + + def leave_field(self, node, *_args): + return join([wrap('', node.alias, ': ') + node.name + + wrap('(', join(node.arguments, ', '), ')'), + join(node.directives, ' '), node.selection_set], ' ') + + def leave_argument(self, node, *_args): + return f'{node.name}: {node.value}' + + # Fragments + + def leave_fragment_spread(self, node, *_args): + return f"...{node.name}{wrap(' ', join(node.directives, ' '))}" + + def leave_inline_fragment(self, node, *_args): + return join(['...', wrap('on ', node.type_condition), + join(node.directives, ' '), node.selection_set], ' ') + + def leave_fragment_definition(self, node, *_args): + # Note: fragment variable definitions are experimental and may b + # changed or removed in the future. 
+ return (f'fragment {node.name}' + f"{wrap('(', join(node.variable_definitions, ', '), ')')}" + f" on {node.type_condition}" + f" {wrap('', join(node.directives, ' '), ' ')}" + f'{node.selection_set}') + + # Value + + def leave_int_value(self, node, *_args): + return node.value + + def leave_float_value(self, node, *_args): + return node.value + + def leave_string_value(self, node, key, *_args): + if node.block: + return print_block_string(node.value, key == 'description') + return dumps(node.value) + + def leave_boolean_value(self, node, *_args): + return 'true' if node.value else 'false' + + def leave_null_value(self, _node, *_args): + return 'null' + + def leave_enum_value(self, node, *_args): + return node.value + + def leave_list_value(self, node, *_args): + return f"[{join(node.values, ', ')}]" + + def leave_object_value(self, node, *_args): + return f"{{{join(node.fields, ', ')}}}" + + def leave_object_field(self, node, *_args): + return f'{node.name}: {node.value}' + + # Directive + + def leave_directive(self, node, *_args): + return f"@{node.name}{wrap('(', join(node.arguments, ', '), ')')}" + + # Type + + def leave_named_type(self, node, *_args): + return node.name + + def leave_list_type(self, node, *_args): + return f'[{node.type}]' + + def leave_non_null_type(self, node, *_args): + return f'{node.type}!' + + # Type System Definitions + + def leave_schema_definition(self, node, *_args): + return join(['schema', join(node.directives, ' '), + block(node.operation_types)], ' ') + + def leave_operation_type_definition(self, node, *_args): + return f'{node.operation.value}: {node.type}' + + @add_description + def leave_scalar_type_definition(self, node, *_args): + return join(['scalar', node.name, join(node.directives, ' ')], ' ') + + @add_description + def leave_object_type_definition(self, node, *_args): + return join(['type', node.name, wrap('implements ', + join(node.interfaces, ' & ')), + join(node.directives, ' '), block(node.fields)], ' ') + + @add_description + def leave_field_definition(self, node, *_args): + args = node.arguments + args = (wrap('(\n', indent(join(args, '\n')), '\n)') + if any('\n' in arg for arg in args) + else wrap('(', join(args, ', '), ')')) + directives = wrap(' ', join(node.directives, ' ')) + return f"{node.name}{args}: {node.type}{directives}" + + @add_description + def leave_input_value_definition(self, node, *_args): + return join([f'{node.name}: {node.type}', + wrap('= ', node.default_value), + join(node.directives, ' ')], ' ') + + @add_description + def leave_interface_type_definition(self, node, *_args): + return join(['interface', node.name, + join(node.directives, ' '), block(node.fields)], ' ') + + @add_description + def leave_union_type_definition(self, node, *_args): + return join(['union', node.name, join(node.directives, ' '), + '= ' + join(node.types, ' | ') if node.types else ''], ' ') + + @add_description + def leave_enum_type_definition(self, node, *_args): + return join(['enum', node.name, join(node.directives, ' '), + block(node.values)], ' ') + + @add_description + def leave_enum_value_definition(self, node, *_args): + return join([node.name, join(node.directives, ' ')], ' ') + + @add_description + def leave_input_object_type_definition(self, node, *_args): + return join(['input', node.name, join(node.directives, ' '), + block(node.fields)], ' ') + + @add_description + def leave_directive_definition(self, node, *_args): + args = node.arguments + args = (wrap('(\n', indent(join(args, '\n')), '\n)') + if any('\n' in arg for arg in 
args) + else wrap('(', join(args, ', '), ')')) + locations = join(node.locations, ' | ') + return f'directive @{node.name}{args} on {locations}' + + def leave_schema_extension(self, node, *_args): + return join(['extend schema', join(node.directives, ' '), + block(node.operation_types)], ' ') + + def leave_scalar_type_extension(self, node, *_args): + return join(['extend scalar', node.name, join(node.directives, ' ')], + ' ') + + def leave_object_type_extension(self, node, *_args): + return join(['extend type', node.name, + wrap('implements ', join(node.interfaces, ' & ')), + join(node.directives, ' '), block(node.fields)], ' ') + + def leave_interface_type_extension(self, node, *_args): + return join(['extend interface', node.name, join(node.directives, ' '), + block(node.fields)], ' ') + + def leave_union_type_extension(self, node, *_args): + return join(['extend union', node.name, join(node.directives, ' '), + '= ' + join(node.types, ' | ') if node.types else ''], ' ') + + def leave_enum_type_extension(self, node, *_args): + return join(['extend enum', node.name, join(node.directives, ' '), + block(node.values)], ' ') + + def leave_input_object_type_extension(self, node, *_args): + return join(['extend input', node.name, join(node.directives, ' '), + block(node.fields)], ' ') + + +def print_block_string(value: str, is_description: bool=False) -> str: + """Print a block string. + + Prints a block string in the indented block form by adding a leading and + trailing blank line. However, if a block string starts with whitespace and + is a single-line, adding a leading blank line would strip that whitespace. + """ + escaped = value.replace('"""', '\\"""') + if value.startswith((' ', '\t')) and '\n' not in value: + if escaped.endswith('"'): + escaped += '\n' + return f'"""{escaped}"""' + else: + if not is_description: + escaped = indent(escaped) + return f'"""\n{escaped}\n"""' + + +def join(strings: Optional[Sequence[str]], separator: str='') -> str: + """Join strings in a given sequence. + + Return an empty string if it is None or empty, otherwise + join all items together separated by separator if provided. + """ + return separator.join(s for s in strings if s) if strings else '' + + +def block(strings: Sequence[str]) -> str: + """Return strings inside a block. + + Given a sequence of strings, return a string with each item on its own + line, wrapped in an indented "{ }" block. + """ + return '{\n' + indent(join(strings, '\n')) + '\n}' if strings else '' + + +def wrap(start: str, string: str, end: str='') -> str: + """Wrap string inside other strings at start and end. + + If the string is not None or empty, then wrap with start and end, + otherwise return an empty string. + """ + return f'{start}{string}{end}' if string else '' + + +def indent(string): + """Indent string with two spaces. + + If the string is not None or empty, add two spaces at the beginning + of every line inside the string. + """ + return ' ' + string.replace('\n', '\n ') if string else string diff --git a/graphql/language/source.py b/graphql/language/source.py new file mode 100644 index 00000000..f2672fdc --- /dev/null +++ b/graphql/language/source.py @@ -0,0 +1,47 @@ +from .location import SourceLocation + +__all__ = ['Source'] + + +class Source: + """A representation of source input to GraphQL.""" + + __slots__ = 'body', 'name', 'location_offset' + + def __init__(self, body: str, name: str=None, + location_offset: SourceLocation=None) -> None: + """Initialize source input. 
+ + + `name` and `location_offset` are optional. They are useful for clients + who store GraphQL documents in source files; for example, if the + GraphQL input starts at line 40 in a file named Foo.graphql, it might + be useful for name to be "Foo.graphql" and location to be `(40, 0)`. + + line and column in location_offset are 1-indexed + """ + + self.body = body + self.name = name or 'GraphQL request' + if not location_offset: + location_offset = SourceLocation(1, 1) + elif not isinstance(location_offset, SourceLocation): + # noinspection PyProtectedMember,PyTypeChecker + location_offset = SourceLocation._make(location_offset) + if location_offset.line <= 0: + raise ValueError( + 'line in location_offset is 1-indexed and must be positive') + if location_offset.column <= 0: + raise ValueError( + 'column in location_offset is 1-indexed and must be positive') + self.location_offset = location_offset + + def get_location(self, position: int) -> SourceLocation: + lines = self.body[:position].splitlines() + if lines: + line = len(lines) + column = len(lines[-1]) + 1 + else: + line = 1 + column = 1 + return SourceLocation(line, column) diff --git a/graphql/language/visitor.py b/graphql/language/visitor.py new file mode 100644 index 00000000..91f6c481 --- /dev/null +++ b/graphql/language/visitor.py @@ -0,0 +1,378 @@ +from copy import copy +from typing import ( + TYPE_CHECKING, Any, Callable, List, NamedTuple, Sequence, Tuple, Union) + +from ..pyutils import snake_to_camel +from . import ast + +from .ast import Node + +if TYPE_CHECKING: + from ..utilities import TypeInfo # noqa: F401 + +__all__ = [ + 'Visitor', 'ParallelVisitor', 'TypeInfoVisitor', 'visit', + 'BREAK', 'SKIP', 'REMOVE', 'IDLE'] + + +# Special return values for the visitor function: +# Note that in GraphQL.js these are defined differently: +# BREAK = {}, SKIP = false, REMOVE = null, IDLE = undefined +BREAK, SKIP, REMOVE, IDLE = True, False, Ellipsis, None + +# Default map from visitor kinds to their traversable node attributes: +QUERY_DOCUMENT_KEYS = { + 'name': (), + + 'document': ('definitions',), + 'operation_definition': ( + 'name', 'variable_definitions', 'directives', 'selection_set'), + 'variable_definition': ('variable', 'type', 'default_value'), + 'variable': ('name',), + 'selection_set': ('selections',), + 'field': ('alias', 'name', 'arguments', 'directives', 'selection_set'), + 'argument': ('name', 'value'), + + 'fragment_spread': ('name', 'directives'), + 'inline_fragment': ('type_condition', 'directives', 'selection_set'), + 'fragment_definition': ( + # Note: fragment variable definitions are experimental and may be + # changed or removed in the future. 
+        'name', 'variable_definitions',
+        'type_condition', 'directives', 'selection_set'),
+    'int_value': (),
+    'float_value': (),
+    'string_value': (),
+    'boolean_value': (),
+    'enum_value': (),
+    'list_value': ('values',),
+    'object_value': ('fields',),
+    'object_field': ('name', 'value'),
+
+    'directive': ('name', 'arguments'),
+
+    'named_type': ('name',),
+    'list_type': ('type',),
+    'non_null_type': ('type',),
+
+    'schema_definition': ('directives', 'operation_types',),
+    'operation_type_definition': ('type',),
+
+    'scalar_type_definition': ('description', 'name', 'directives',),
+    'object_type_definition': (
+        'description', 'name', 'interfaces', 'directives', 'fields'),
+    'field_definition': (
+        'description', 'name', 'arguments', 'type', 'directives'),
+    'input_value_definition': (
+        'description', 'name', 'type', 'default_value', 'directives'),
+    'interface_type_definition': (
+        'description', 'name', 'directives', 'fields'),
+    'union_type_definition': ('description', 'name', 'directives', 'types'),
+    'enum_type_definition': ('description', 'name', 'directives', 'values'),
+    'enum_value_definition': ('description', 'name', 'directives',),
+    'input_object_type_definition': (
+        'description', 'name', 'directives', 'fields'),
+
+    'directive_definition': ('description', 'name', 'arguments', 'locations'),
+
+    'schema_extension': ('directives', 'operation_types'),
+
+    'scalar_type_extension': ('name', 'directives'),
+    'object_type_extension': ('name', 'interfaces', 'directives', 'fields'),
+    'interface_type_extension': ('name', 'directives', 'fields'),
+    'union_type_extension': ('name', 'directives', 'types'),
+    'enum_type_extension': ('name', 'directives', 'values'),
+    'input_object_type_extension': ('name', 'directives', 'fields')
+}
+
+
+class Visitor:
+    """Visitor that walks through an AST.
+
+    Visitors can define two generic methods "enter" and "leave".
+    The former will be called when a node is entered in the traversal,
+    the latter is called after visiting the node and its child nodes.
+    These methods have the following signature::
+
+        def enter(self, node, key, parent, path, ancestors):
+            # The return value has the following meaning:
+            # IDLE (None): no action
+            # SKIP: skip visiting this node
+            # BREAK: stop visiting altogether
+            # REMOVE: delete this node
+            # any other value: replace this node with the returned value
+            return
+
+        def leave(self, node, key, parent, path, ancestors):
+            # The return value has the following meaning:
+            # IDLE (None) or SKIP: no action
+            # BREAK: stop visiting altogether
+            # REMOVE: delete this node
+            # any other value: replace this node with the returned value
+            return
+
+    The parameters have the following meaning:
+
+    :arg node: The current node being visited.
+    :arg key: The index or key to this node from the parent node or Array.
+    :arg parent: The parent immediately above this node, which may be an Array.
+    :arg path: The key path to get to this node from the root node.
+    :arg ancestors: All nodes and Arrays visited before reaching parent
+        of this node. These correspond to array indices in `path`.
+
+    Note: ancestors includes arrays which contain the parent of visited node.
+
+    You can also define node kind specific methods by suffixing them
+    with an underscore followed by the kind of the node to be visited.
+    For instance, to visit field nodes, you would define the methods
+    enter_field() and/or leave_field(), with the same signature as above.
+    If no kind specific method has been defined for a given node, the
+    generic method is called.
+ """ + + # Provide special return values as attributes + BREAK, SKIP, REMOVE, IDLE = BREAK, SKIP, REMOVE, IDLE + + def __init_subclass__(cls, **kwargs): + """Verify that all defined handlers are valid.""" + super().__init_subclass__(**kwargs) + for attr, val in cls.__dict__.items(): + if attr.startswith('_'): + continue + attr = attr.split('_', 1) + attr, kind = attr if len(attr) > 1 else (attr[0], None) + if attr in ('enter', 'leave'): + if kind: + name = snake_to_camel(kind) + 'Node' + try: + node_cls = getattr(ast, name) + if not issubclass(node_cls, Node): + raise AttributeError + except AttributeError: + raise AttributeError(f'Invalid AST node kind: {kind}') + + @classmethod + def get_visit_fn(cls, kind, is_leaving=False) -> Callable: + """Get the visit function for the given node kind and direction.""" + method = 'leave' if is_leaving else 'enter' + visit_fn = getattr(cls, f'{method}_{kind}', None) + if not visit_fn: + visit_fn = getattr(cls, method, None) + return visit_fn + + +class Stack(NamedTuple): + """A stack for the visit function.""" + + in_array: bool + idx: int + keys: Tuple[Node, ...] + edits: List[Tuple[Union[int, str], Node]] + prev: Any # 'Stack' (python/mypy/issues/731) + + +def visit(root: Node, visitor: Visitor, visitor_keys=None) -> Node: + """Visit each node in an AST. + + visit() will walk through an AST using a depth first traversal, calling + the visitor's enter methods at each node in the traversal, and calling the + leave methods after visiting that node and all of its child nodes. + + By returning different values from the enter and leave methods, + the behavior of the visitor can be altered, including skipping over + a sub-tree of the AST (by returning False), editing the AST by returning + a value or None to remove the value, or to stop the whole traversal + by returning BREAK. + + When using visit() to edit an AST, the original AST will not be modified, + and a new version of the AST with the changes applied will be returned + from the visit function. + + To customize the node attributes to be used for traversal, you can provide + a dictionary visitor_keys mapping node kinds to node attributes. + """ + if not isinstance(root, Node): + raise TypeError(f'Not an AST Node: {root!r}') + if not isinstance(visitor, Visitor): + raise TypeError(f'Not an AST Visitor class: {visitor!r}') + if visitor_keys is None: + visitor_keys = QUERY_DOCUMENT_KEYS + stack: Any = None + in_array = isinstance(root, list) + keys: Tuple[Node, ...] 
= (root,) + idx = -1 + edits: List[Any] = [] + parent: Any = None + path: List[Any] = [] + path_append = path.append + path_pop = path.pop + ancestors: List[Any] = [] + ancestors_append = ancestors.append + ancestors_pop = ancestors.pop + new_root = root + + while True: + idx += 1 + is_leaving = idx == len(keys) + is_edited = is_leaving and edits + if is_leaving: + key = path[-1] if ancestors else None + node: Any = parent + parent = ancestors_pop() if ancestors else None + if is_edited: + if in_array: + node = node[:] + else: + node = copy(node) + edit_offset = 0 + for edit_key, edit_value in edits: + if in_array: + edit_key -= edit_offset + if in_array and edit_value is REMOVE: + node.pop(edit_key) + edit_offset += 1 + else: + if isinstance(node, list): + node[edit_key] = edit_value + else: + setattr(node, edit_key, edit_value) + + idx = stack.idx + keys = stack.keys + edits = stack.edits + in_array = stack.in_array + stack = stack.prev + else: + if parent: + if in_array: + key = idx + node = parent[key] + else: + key = keys[idx] + node = getattr(parent, key, None) + else: + key = None + node = new_root + if node is None or node is REMOVE: + continue + if parent: + path_append(key) + + if isinstance(node, list): + result = None + else: + if not isinstance(node, Node): + raise TypeError(f'Not an AST Node: {node!r}') + visit_fn = visitor.get_visit_fn(node.kind, is_leaving) + if visit_fn: + result = visit_fn(visitor, node, key, parent, path, ancestors) + + if result is BREAK: + break + + if result is SKIP: + if not is_leaving: + path_pop() + continue + + elif result is not None: + edits.append((key, result)) + if not is_leaving: + if isinstance(result, Node): + node = result + else: + path_pop() + continue + else: + result = None + + if result is None and is_edited: + edits.append((key, node)) + + if is_leaving: + if path: + path_pop() + else: + stack = Stack(in_array, idx, keys, edits, stack) + in_array = isinstance(node, list) + keys = node if in_array else visitor_keys.get(node.kind, ()) + idx = -1 + edits = [] + if parent: + ancestors_append(parent) + parent = node + + if not stack: + break + + if edits: + new_root = edits[-1][1] + + return new_root + + +class ParallelVisitor(Visitor): + """A Visitor which delegates to many visitors to run in parallel. + + Each visitor will be visited for each node before moving on. + + If a prior visitor edits a node, no following visitors will see that node. 
+ """ + + def __init__(self, visitors: Sequence[Visitor]) -> None: + """Create a new visitor from the given list of parallel visitors.""" + self.visitors = visitors + self.skipping: List[Any] = [None] * len(visitors) + + def enter(self, node, *args): + skipping = self.skipping + for i, visitor in enumerate(self.visitors): + if not skipping[i]: + fn = visitor.get_visit_fn(node.kind) + if fn: + result = fn(visitor, node, *args) + if result is SKIP: + skipping[i] = node + elif result == BREAK: + skipping[i] = BREAK + elif result is not None: + return result + + def leave(self, node, *args): + skipping = self.skipping + for i, visitor in enumerate(self.visitors): + if not skipping[i]: + fn = visitor.get_visit_fn(node.kind, is_leaving=True) + if fn: + result = fn(visitor, node, *args) + if result == BREAK: + skipping[i] = BREAK + elif result is not None and result is not SKIP: + return result + elif skipping[i] is node: + skipping[i] = None + + +class TypeInfoVisitor(Visitor): + """A visitor which maintains a provided TypeInfo.""" + + def __init__(self, type_info: 'TypeInfo', visitor: Visitor) -> None: + self.type_info = type_info + self.visitor = visitor + + def enter(self, node, *args): + self.type_info.enter(node) + fn = self.visitor.get_visit_fn(node.kind) + if fn: + result = fn(self.visitor, node, *args) + if result is not None: + self.type_info.leave(node) + if isinstance(result, ast.Node): + self.type_info.enter(result) + return result + + def leave(self, node, *args): + fn = self.visitor.get_visit_fn(node.kind, is_leaving=True) + result = fn(self.visitor, node, *args) if fn else None + self.type_info.leave(node) + return result diff --git a/graphql/pyutils/__init__.py b/graphql/pyutils/__init__.py new file mode 100644 index 00000000..17bbd760 --- /dev/null +++ b/graphql/pyutils/__init__.py @@ -0,0 +1,30 @@ +"""Python Utils + +This package contains dependency-free Python utility functions used +throughout the codebase. + +Each utility should belong in its own file and be the default export. + +These functions are not part of the module interface and are subject to change. +""" + +from .convert_case import camel_to_snake, snake_to_camel +from .cached_property import cached_property +from .contain_subset import contain_subset +from .dedent import dedent +from .event_emitter import EventEmitter, EventEmitterAsyncIterator +from .is_finite import is_finite +from .is_integer import is_integer +from .is_invalid import is_invalid +from .is_nullish import is_nullish +from .maybe_awaitable import MaybeAwaitable +from .or_list import or_list +from .quoted_or_list import quoted_or_list +from .suggestion_list import suggestion_list + +__all__ = [ + 'camel_to_snake', 'snake_to_camel', 'cached_property', + 'contain_subset', 'dedent', + 'EventEmitter', 'EventEmitterAsyncIterator', + 'is_finite', 'is_integer', 'is_invalid', 'is_nullish', 'MaybeAwaitable', + 'or_list', 'quoted_or_list', 'suggestion_list'] diff --git a/graphql/pyutils/cached_property.py b/graphql/pyutils/cached_property.py new file mode 100644 index 00000000..0727c194 --- /dev/null +++ b/graphql/pyutils/cached_property.py @@ -0,0 +1,24 @@ +# Code taken from https://github.com/bottlepy/bottle + +__all__ = ['cached_property'] + + +class CachedProperty: + """A cached property. + + A property that is only computed once per instance and then replaces itself + with an ordinary attribute. Deleting the attribute resets the property. 
+ """ + + def __init__(self, func): + self.__doc__ = getattr(func, '__doc__') + self.func = func + + def __get__(self, obj, cls): + if obj is None: + return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value + + +cached_property = CachedProperty diff --git a/graphql/pyutils/contain_subset.py b/graphql/pyutils/contain_subset.py new file mode 100644 index 00000000..57bf5627 --- /dev/null +++ b/graphql/pyutils/contain_subset.py @@ -0,0 +1,34 @@ +__all__ = ['contain_subset'] + + +def contain_subset(actual, expected): + """Recursively check if actual collection contains an expected subset. + + This simulates the containSubset object properties matcher for Chai. + """ + if expected == actual: + return True + if isinstance(expected, list): + if not isinstance(actual, list): + return False + return all(any(contain_subset(actual_value, expected_value) + for actual_value in actual) for expected_value in expected) + if not isinstance(expected, dict): + return False + if not isinstance(actual, dict): + return False + for key, expected_value in expected.items(): + try: + actual_value = actual[key] + except KeyError: + return False + if callable(expected_value): + try: + if not expected_value(actual_value): + return False + except TypeError: + if not expected_value(): + return False + elif not contain_subset(actual_value, expected_value): + return False + return True diff --git a/graphql/pyutils/convert_case.py b/graphql/pyutils/convert_case.py new file mode 100644 index 00000000..84cf0427 --- /dev/null +++ b/graphql/pyutils/convert_case.py @@ -0,0 +1,25 @@ +# uses code from https://github.com/daveoncode/python-string-utils + +import re + +__all__ = ['camel_to_snake', 'snake_to_camel'] + +_re_camel_to_snake = re.compile(r'([a-z]|[A-Z]+)(?=[A-Z])') +_re_snake_to_camel = re.compile(r'(_)([a-z\d])') + + +def camel_to_snake(s): + """Convert from CamelCase to snake_case""" + return _re_camel_to_snake.sub(r'\1_', s).lower() + + +def snake_to_camel(s, upper=True): + """Convert from snake_case to CamelCase + + If upper is set, then convert to upper CamelCase, + otherwise the first character keeps its case. + """ + s = _re_snake_to_camel.sub(lambda m: m.group(2).upper(), s) + if upper: + s = s[:1].upper() + s[1:] + return s diff --git a/graphql/pyutils/dedent.py b/graphql/pyutils/dedent.py new file mode 100644 index 00000000..977f88d4 --- /dev/null +++ b/graphql/pyutils/dedent.py @@ -0,0 +1,12 @@ +from textwrap import dedent as _dedent + +__all__ = ['dedent'] + + +def dedent(text: str) -> str: + """Fix indentation of given text by removing leading spaces and tabs. + + Also removes leading newlines and trailing spaces and tabs, + but keeps trailing newlines. 
+ """ + return _dedent(text.lstrip('\n').rstrip(' \t')) diff --git a/graphql/pyutils/event_emitter.py b/graphql/pyutils/event_emitter.py new file mode 100644 index 00000000..922427c6 --- /dev/null +++ b/graphql/pyutils/event_emitter.py @@ -0,0 +1,65 @@ +from typing import Callable, Dict, List + +from asyncio import Queue, ensure_future +from inspect import isawaitable + +from collections import defaultdict + +__all__ = ['EventEmitter', 'EventEmitterAsyncIterator'] + + +class EventEmitter: + """A very simple EventEmitter.""" + + def __init__(self, loop=None) -> None: + self.loop = loop + self.listeners: Dict[str, List[Callable]] = defaultdict(list) + + def add_listener(self, event_name: str, listener: Callable): + """Add a listener.""" + self.listeners[event_name].append(listener) + return self + + def remove_listener(self, event_name, listener): + """Removes a listener.""" + self.listeners[event_name].remove(listener) + return self + + def emit(self, event_name, *args, **kwargs): + """Emit an event.""" + listeners = list(self.listeners[event_name]) + if not listeners: + return False + for listener in listeners: + result = listener(*args, **kwargs) + if isawaitable(result): + ensure_future(result, loop=self.loop) + return True + + +class EventEmitterAsyncIterator: + """Create an AsyncIterator from an EventEmitter. + + Useful for mocking a PubSub system for tests. + """ + + def __init__(self, event_emitter: EventEmitter, event_name: str) -> None: + self.queue: Queue = Queue(loop=event_emitter.loop) + event_emitter.add_listener(event_name, self.queue.put) + self.remove_listener = lambda: event_emitter.remove_listener( + event_name, self.queue.put) + self.closed = False + + def __aiter__(self): + return self + + async def __anext__(self): + if self.closed: + raise StopAsyncIteration + return await self.queue.get() + + async def aclose(self): + self.remove_listener() + while not self.queue.empty(): + await self.queue.get() + self.closed = True diff --git a/graphql/pyutils/is_finite.py b/graphql/pyutils/is_finite.py new file mode 100644 index 00000000..77029382 --- /dev/null +++ b/graphql/pyutils/is_finite.py @@ -0,0 +1,10 @@ +from typing import Any +from math import isfinite + +__all__ = ['is_finite'] + + +def is_finite(value: Any) -> bool: + """Return true if a value is a finite number.""" + return isinstance(value, int) or ( + isinstance(value, float) and isfinite(value)) diff --git a/graphql/pyutils/is_integer.py b/graphql/pyutils/is_integer.py new file mode 100644 index 00000000..3f07e2b7 --- /dev/null +++ b/graphql/pyutils/is_integer.py @@ -0,0 +1,10 @@ +from typing import Any +from math import isfinite + +__all__ = ['is_integer'] + + +def is_integer(value: Any) -> bool: + """Return true if a value is an integer number.""" + return (isinstance(value, int) and not isinstance(value, bool)) or ( + isinstance(value, float) and isfinite(value) and int(value) == value) diff --git a/graphql/pyutils/is_invalid.py b/graphql/pyutils/is_invalid.py new file mode 100644 index 00000000..ed9d509e --- /dev/null +++ b/graphql/pyutils/is_invalid.py @@ -0,0 +1,10 @@ +from typing import Any + +from ..error import INVALID + +__all__ = ['is_invalid'] + + +def is_invalid(value: Any) -> bool: + """Return true if a value is undefined, or NaN.""" + return value is INVALID or value != value diff --git a/graphql/pyutils/is_nullish.py b/graphql/pyutils/is_nullish.py new file mode 100644 index 00000000..650a2504 --- /dev/null +++ b/graphql/pyutils/is_nullish.py @@ -0,0 +1,10 @@ +from typing import Any + +from ..error 
import INVALID + +__all__ = ['is_nullish'] + + +def is_nullish(value: Any) -> bool: + """Return true if a value is null, undefined, or NaN.""" + return value is None or value is INVALID or value != value diff --git a/graphql/pyutils/maybe_awaitable.py b/graphql/pyutils/maybe_awaitable.py new file mode 100644 index 00000000..0adab473 --- /dev/null +++ b/graphql/pyutils/maybe_awaitable.py @@ -0,0 +1,8 @@ +from typing import Awaitable, TypeVar, Union + +__all__ = ['MaybeAwaitable'] + + +T = TypeVar('T') + +MaybeAwaitable = Union[Awaitable[T], T] diff --git a/graphql/pyutils/or_list.py b/graphql/pyutils/or_list.py new file mode 100644 index 00000000..6ddacf96 --- /dev/null +++ b/graphql/pyutils/or_list.py @@ -0,0 +1,16 @@ +from typing import Optional, Sequence + +__all__ = ['or_list'] + + +MAX_LENGTH = 5 + + +def or_list(items: Sequence[str]) -> Optional[str]: + """Given [A, B, C] return 'A, B, or C'.""" + if not items: + raise TypeError('List must not be empty') + if len(items) == 1: + return items[0] + selected = items[:MAX_LENGTH] + return ', '.join(selected[:-1]) + ' or ' + selected[-1] diff --git a/graphql/pyutils/quoted_or_list.py b/graphql/pyutils/quoted_or_list.py new file mode 100644 index 00000000..731f6afd --- /dev/null +++ b/graphql/pyutils/quoted_or_list.py @@ -0,0 +1,13 @@ +from typing import Optional, List + +from .or_list import or_list + +__all__ = ['quoted_or_list'] + + +def quoted_or_list(items: List[str]) -> Optional[str]: + """Given [A, B, C] return "'A', 'B', or 'C'". + + Note: We use single quotes here, since these are also used by repr(). + """ + return or_list([f"'{item}'" for item in items]) diff --git a/graphql/pyutils/suggestion_list.py b/graphql/pyutils/suggestion_list.py new file mode 100644 index 00000000..ccb8025e --- /dev/null +++ b/graphql/pyutils/suggestion_list.py @@ -0,0 +1,62 @@ +from typing import Collection + +__all__ = ['suggestion_list'] + + +def suggestion_list(input_: str, options: Collection[str]): + """Get list with suggestions for a given input. + + Given an invalid input string and list of valid options, returns a filtered + list of valid options sorted based on their similarity with the input. + """ + options_by_distance = {} + input_threshold = len(input_) // 2 + + for option in options: + distance = lexical_distance(input_, option) + threshold = max(input_threshold, len(option) // 2, 1) + if distance <= threshold: + options_by_distance[option] = distance + + return sorted(options_by_distance, key=options_by_distance.get) + + +def lexical_distance(a_str: str, b_str: str) -> int: + """Computes the lexical distance between strings A and B. + + The "distance" between two strings is given by counting the minimum number + of edits needed to transform string A into string B. An edit can be an + insertion, deletion, or substitution of a single character, or a swap of + two adjacent characters. 
+ + This distance can be useful for detecting typos in input or sorting + """ + if a_str == b_str: + return 0 + + a, b = a_str.lower(), b_str.lower() + a_len, b_len = len(a), len(b) + + # Any case change counts as a single edit + if a == b: + return 1 + + d = [[j for j in range(0, b_len + 1)]] + for i in range(1, a_len + 1): + d.append([i] + [0] * b_len) + + for i in range(1, a_len + 1): + for j in range(1, b_len + 1): + cost = 0 if a[i - 1] == b[j - 1] else 1 + + d[i][j] = min( + d[i - 1][j] + 1, + d[i][j - 1] + 1, + d[i - 1][j - 1] + cost) + + if (i > 1 and j > 1 and + a[i - 1] == b[j - 2] and + a[i - 2] == b[j - 1]): + d[i][j] = min(d[i][j], d[i - 2][j - 2] + cost) + + return d[a_len][b_len] diff --git a/graphql/subscription/__init__.py b/graphql/subscription/__init__.py new file mode 100644 index 00000000..8fb0823c --- /dev/null +++ b/graphql/subscription/__init__.py @@ -0,0 +1,9 @@ +"""GraphQL Subscription + +The `graphql.subscription` package is responsible for subscribing to updates +on specific data. +""" + +from .subscribe import subscribe, create_source_event_stream + +__all__ = ['subscribe', 'create_source_event_stream'] diff --git a/graphql/subscription/map_async_iterator.py b/graphql/subscription/map_async_iterator.py new file mode 100644 index 00000000..6d2c3b06 --- /dev/null +++ b/graphql/subscription/map_async_iterator.py @@ -0,0 +1,73 @@ +from inspect import isawaitable +from typing import AsyncIterable, Callable + +__all__ = ['MapAsyncIterator'] + + +class MapAsyncIterator: + """Map an AsyncIterable over a callback function. + + Given an AsyncIterable and a callback function, return an AsyncIterator + which produces values mapped via calling the callback function. + + When the resulting AsyncIterator is closed, the underlying AsyncIterable + will also be closed. 
+ """ + + def __init__(self, iterable: AsyncIterable, callback: Callable, + reject_callback: Callable=None) -> None: + self.iterator = iterable.__aiter__() + self.callback = callback + self.reject_callback = reject_callback + self.error = None + + def __aiter__(self): + return self + + async def __anext__(self): + if self.error is not None: + raise self.error + try: + value = await self.iterator.__anext__() + except Exception as error: + if not self.reject_callback or isinstance(error, ( + StopAsyncIteration, GeneratorExit)): + raise + if self.error is not None: + raise self.error + result = self.reject_callback(error) + else: + if self.error is not None: + raise self.error + result = self.callback(value) + if isawaitable(result): + result = await result + if self.error is not None: + raise self.error + return result + + async def athrow(self, type_, value=None, traceback=None): + if self.error: + return + athrow = getattr(self.iterator, 'athrow', None) + if athrow: + await athrow(type_, value, traceback) + else: + error = type_ + if value is not None: + error = error(value) + if traceback is not None: + error = error.with_traceback(traceback) + self.error = error + + async def aclose(self): + if self.error: + return + aclose = getattr(self.iterator, 'aclose', None) + if aclose: + try: + await aclose() + except RuntimeError: + pass + else: + self.error = StopAsyncIteration diff --git a/graphql/subscription/subscribe.py b/graphql/subscription/subscribe.py new file mode 100644 index 00000000..2d296858 --- /dev/null +++ b/graphql/subscription/subscribe.py @@ -0,0 +1,155 @@ +from inspect import isawaitable +from typing import ( + Any, AsyncIterable, AsyncIterator, Awaitable, Dict, Union, cast) + +from ..error import GraphQLError, located_error +from ..execution.execute import ( + add_path, assert_valid_execution_arguments, execute, get_field_def, + response_path_as_list, ExecutionContext, ExecutionResult) +from ..language import DocumentNode +from ..type import GraphQLFieldResolver, GraphQLSchema +from ..utilities import get_operation_root_type +from .map_async_iterator import MapAsyncIterator + +__all__ = ['subscribe', 'create_source_event_stream'] + + +async def subscribe( + schema: GraphQLSchema, + document: DocumentNode, + root_value: Any=None, + context_value: Any=None, + variable_values: Dict[str, Any]=None, + operation_name: str = None, + field_resolver: GraphQLFieldResolver=None, + subscribe_field_resolver: GraphQLFieldResolver=None + ) -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: + """Create a GraphQL subscription. + + Implements the "Subscribe" algorithm described in the GraphQL spec. + + Returns a coroutine object which yields either an AsyncIterator (if + successful) or an ExecutionResult (client error). The coroutine will raise + an exception if a server error occurs. + + If the client-provided arguments to this function do not result in a + compliant subscription, a GraphQL Response (ExecutionResult) with + descriptive errors and no data will be returned. + + If the the source stream could not be created due to faulty subscription + resolver logic or underlying systems, the coroutine object will yield a + single ExecutionResult containing `errors` and no `data`. + + If the operation succeeded, the coroutine will yield an AsyncIterator, + which yields a stream of ExecutionResults representing the response stream. 
+ """ + try: + result_or_stream = await create_source_event_stream( + schema, document, root_value, context_value, variable_values, + operation_name, subscribe_field_resolver) + except GraphQLError as error: + return ExecutionResult(data=None, errors=[error]) + if isinstance(result_or_stream, ExecutionResult): + return result_or_stream + result_or_stream = cast(AsyncIterable, result_or_stream) + + async def map_source_to_response(payload): + """Map source to response. + + For each payload yielded from a subscription, map it over the normal + GraphQL `execute` function, with `payload` as the root_value. + This implements the "MapSourceToResponseEvent" algorithm described in + the GraphQL specification. The `execute` function provides the + "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the + "ExecuteQuery" algorithm, for which `execute` is also used. + """ + return execute(schema, document, payload, context_value, + variable_values, operation_name, field_resolver) + + return MapAsyncIterator(result_or_stream, map_source_to_response) + + +async def create_source_event_stream( + schema: GraphQLSchema, + document: DocumentNode, + root_value: Any=None, + context_value: Any=None, + variable_values: Dict[str, Any]=None, + operation_name: str = None, + field_resolver: GraphQLFieldResolver=None + ) -> Union[AsyncIterable[Any], ExecutionResult]: + """Create source even stream + + Implements the "CreateSourceEventStream" algorithm described in the + GraphQL specification, resolving the subscription source event stream. + + Returns a coroutine that yields an AsyncIterable. + + If the client-provided invalid arguments, the source stream could not be + created, or the resolver did not return an AsyncIterable, this function + will throw an error, which should be caught and handled by the caller. + + A Source Event Stream represents a sequence of events, each of which + triggers a GraphQL execution for that event. + + This may be useful when hosting the stateful subscription service in a + different process or machine than the stateless GraphQL execution engine, + or otherwise separating these two steps. For more on this, see the + "Supporting Subscriptions at Scale" information in the GraphQL spec. + """ + # If arguments are missing or incorrectly typed, this is an internal + # developer mistake which should throw an early error. + assert_valid_execution_arguments(schema, document, variable_values) + + # If a valid context cannot be created due to incorrect arguments, + # this will throw an error. + context = ExecutionContext.build( + schema, document, root_value, context_value, + variable_values, operation_name, field_resolver) + + # Return early errors if execution context failed. + if isinstance(context, list): + return ExecutionResult(data=None, errors=context) + + type_ = get_operation_root_type(schema, context.operation) + fields = context.collect_fields( + type_, context.operation.selection_set, {}, set()) + response_names = list(fields) + response_name = response_names[0] + field_nodes = fields[response_name] + field_node = field_nodes[0] + field_name = field_node.name.value + field_def = get_field_def(schema, type_, field_name) + + if not field_def: + raise GraphQLError( + f"The subscription field '{field_name}' is not defined.", + field_nodes) + + # Call the `subscribe()` resolver or the default resolver to produce an + # AsyncIterable yielding raw payloads. 
+ resolve_fn = field_def.subscribe or context.field_resolver + resolve_fn = cast(GraphQLFieldResolver, resolve_fn) # help mypy + + path = add_path(None, response_name) + + info = context.build_resolve_info(field_def, field_nodes, type_, path) + + # resolve_field_value_or_error implements the "ResolveFieldEventStream" + # algorithm from GraphQL specification. It differs from + # "resolve_field_value" due to providing a different `resolve_fn`. + result = context.resolve_field_value_or_error( + field_def, field_nodes, resolve_fn, root_value, info) + event_stream = (await cast(Awaitable, result) if isawaitable(result) + else result) + # If event_stream is an Error, rethrow a located error. + if isinstance(event_stream, Exception): + raise located_error( + event_stream, field_nodes, response_path_as_list(path)) + + # Assert field returned an event stream, otherwise yield an error. + if isinstance(event_stream, AsyncIterable): + return cast(AsyncIterable, event_stream) + raise TypeError( + 'Subscription field must return AsyncIterable.' + f' Received: {event_stream!r}') diff --git a/graphql/type/__init__.py b/graphql/type/__init__.py new file mode 100644 index 00000000..372875cc --- /dev/null +++ b/graphql/type/__init__.py @@ -0,0 +1,115 @@ +"""GraphQL Type System + +The `graphql.type` package is responsible for defining GraphQL types +and schema. +""" + +from .schema import ( + # Predicate + is_schema, + # GraphQL Schema definition + GraphQLSchema) + +from .definition import ( + # Predicates + is_type, is_scalar_type, is_object_type, is_interface_type, + is_union_type, is_enum_type, is_input_object_type, is_list_type, + is_non_null_type, is_input_type, is_output_type, is_leaf_type, + is_composite_type, is_abstract_type, is_wrapping_type, + is_nullable_type, is_named_type, + # Assertions + assert_type, assert_scalar_type, assert_object_type, + assert_interface_type, assert_union_type, assert_enum_type, + assert_input_object_type, assert_list_type, assert_non_null_type, + assert_input_type, assert_output_type, assert_leaf_type, + assert_composite_type, assert_abstract_type, assert_wrapping_type, + assert_nullable_type, assert_named_type, + # Un-modifiers + get_nullable_type, get_named_type, + # Definitions + GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, + GraphQLUnionType, GraphQLEnumType, GraphQLInputObjectType, + # Type Wrappers + GraphQLList, GraphQLNonNull, + # Types + GraphQLType, GraphQLInputType, GraphQLOutputType, + GraphQLLeafType, GraphQLCompositeType, GraphQLAbstractType, + GraphQLWrappingType, GraphQLNullableType, GraphQLNamedType, + Thunk, GraphQLArgument, GraphQLArgumentMap, + GraphQLEnumValue, GraphQLEnumValueMap, + GraphQLField, GraphQLFieldMap, + GraphQLInputField, GraphQLInputFieldMap, + GraphQLScalarSerializer, GraphQLScalarValueParser, + GraphQLScalarLiteralParser, + # Resolvers + GraphQLFieldResolver, GraphQLTypeResolver, GraphQLIsTypeOfFn, + GraphQLResolveInfo, ResponsePath) + +from .directives import ( + # Predicate + is_directive, + # Directives Definition + GraphQLDirective, + # Built-in Directives defined by the Spec + is_specified_directive, + specified_directives, + GraphQLIncludeDirective, + GraphQLSkipDirective, + GraphQLDeprecatedDirective, + # Constant Deprecation Reason + DEFAULT_DEPRECATION_REASON) + +# Common built-in scalar instances. 
+from .scalars import ( + is_specified_scalar_type, specified_scalar_types, + GraphQLInt, GraphQLFloat, GraphQLString, + GraphQLBoolean, GraphQLID) + +from .introspection import ( + # "Enum" of Type Kinds + TypeKind, + # GraphQL Types for introspection. + is_introspection_type, introspection_types, + # Meta-field definitions. + SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef) + +from .validate import validate_schema, assert_valid_schema + +__all__ = [ + 'is_schema', 'GraphQLSchema', + 'is_type', 'is_scalar_type', 'is_object_type', 'is_interface_type', + 'is_union_type', 'is_enum_type', 'is_input_object_type', 'is_list_type', + 'is_non_null_type', 'is_input_type', 'is_output_type', 'is_leaf_type', + 'is_composite_type', 'is_abstract_type', 'is_wrapping_type', + 'is_nullable_type', 'is_named_type', + 'assert_type', 'assert_scalar_type', 'assert_object_type', + 'assert_interface_type', 'assert_union_type', 'assert_enum_type', + 'assert_input_object_type', 'assert_list_type', 'assert_non_null_type', + 'assert_input_type', 'assert_output_type', 'assert_leaf_type', + 'assert_composite_type', 'assert_abstract_type', 'assert_wrapping_type', + 'assert_nullable_type', 'assert_named_type', + 'get_nullable_type', 'get_named_type', + 'GraphQLScalarType', 'GraphQLObjectType', 'GraphQLInterfaceType', + 'GraphQLUnionType', 'GraphQLEnumType', + 'GraphQLInputObjectType', 'GraphQLInputType', 'GraphQLArgument', + 'GraphQLList', 'GraphQLNonNull', + 'GraphQLType', 'GraphQLInputType', 'GraphQLOutputType', + 'GraphQLLeafType', 'GraphQLCompositeType', 'GraphQLAbstractType', + 'GraphQLWrappingType', 'GraphQLNullableType', 'GraphQLNamedType', + 'Thunk', 'GraphQLArgument', 'GraphQLArgumentMap', + 'GraphQLEnumValue', 'GraphQLEnumValueMap', + 'GraphQLField', 'GraphQLFieldMap', + 'GraphQLInputField', 'GraphQLInputFieldMap', + 'GraphQLScalarSerializer', 'GraphQLScalarValueParser', + 'GraphQLScalarLiteralParser', + 'GraphQLFieldResolver', 'GraphQLTypeResolver', 'GraphQLIsTypeOfFn', + 'GraphQLResolveInfo', 'ResponsePath', + 'is_directive', 'is_specified_directive', 'specified_directives', + 'GraphQLDirective', 'GraphQLIncludeDirective', 'GraphQLSkipDirective', + 'GraphQLDeprecatedDirective', 'DEFAULT_DEPRECATION_REASON', + 'is_specified_scalar_type', 'specified_scalar_types', + 'GraphQLInt', 'GraphQLFloat', 'GraphQLString', + 'GraphQLBoolean', 'GraphQLID', + 'TypeKind', 'is_introspection_type', 'introspection_types', + 'SchemaMetaFieldDef', 'TypeMetaFieldDef', 'TypeNameMetaFieldDef', + 'validate_schema', 'assert_valid_schema'] diff --git a/graphql/type/definition.py b/graphql/type/definition.py new file mode 100644 index 00000000..4c9489d2 --- /dev/null +++ b/graphql/type/definition.py @@ -0,0 +1,1204 @@ +from enum import Enum +from typing import ( + Any, Callable, Dict, Generic, List, NamedTuple, Optional, + Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union, cast, overload) + +from ..error import GraphQLError, INVALID, InvalidType +from ..language import ( + EnumTypeDefinitionNode, EnumValueDefinitionNode, + EnumTypeExtensionNode, EnumValueNode, + FieldDefinitionNode, FieldNode, FragmentDefinitionNode, + InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode, + InputValueDefinitionNode, InterfaceTypeDefinitionNode, + InterfaceTypeExtensionNode, ObjectTypeDefinitionNode, + ObjectTypeExtensionNode, OperationDefinitionNode, + ScalarTypeDefinitionNode, ScalarTypeExtensionNode, + TypeDefinitionNode, TypeExtensionNode, + UnionTypeDefinitionNode, UnionTypeExtensionNode, ValueNode) +from ..pyutils import 
MaybeAwaitable, cached_property +from ..utilities.value_from_ast_untyped import value_from_ast_untyped + +if TYPE_CHECKING: + from .schema import GraphQLSchema # noqa: F401 + +__all__ = [ + 'is_type', 'is_scalar_type', 'is_object_type', 'is_interface_type', + 'is_union_type', 'is_enum_type', 'is_input_object_type', 'is_list_type', + 'is_non_null_type', 'is_input_type', 'is_output_type', 'is_leaf_type', + 'is_composite_type', 'is_abstract_type', 'is_wrapping_type', + 'is_nullable_type', 'is_named_type', + 'assert_type', 'assert_scalar_type', 'assert_object_type', + 'assert_interface_type', 'assert_union_type', 'assert_enum_type', + 'assert_input_object_type', 'assert_list_type', 'assert_non_null_type', + 'assert_input_type', 'assert_output_type', 'assert_leaf_type', + 'assert_composite_type', 'assert_abstract_type', 'assert_wrapping_type', + 'assert_nullable_type', 'assert_named_type', + 'get_nullable_type', 'get_named_type', + 'GraphQLAbstractType', 'GraphQLArgument', 'GraphQLArgumentMap', + 'GraphQLCompositeType', 'GraphQLEnumType', 'GraphQLEnumValue', + 'GraphQLEnumValueMap', 'GraphQLField', 'GraphQLFieldMap', + 'GraphQLFieldResolver', 'GraphQLInputField', 'GraphQLInputFieldMap', + 'GraphQLInputObjectType', 'GraphQLInputType', 'GraphQLIsTypeOfFn', + 'GraphQLLeafType', 'GraphQLList', 'GraphQLNamedType', + 'GraphQLNullableType', 'GraphQLNonNull', 'GraphQLResolveInfo', + 'GraphQLScalarType', 'GraphQLScalarSerializer', 'GraphQLScalarValueParser', + 'GraphQLScalarLiteralParser', 'GraphQLObjectType', 'GraphQLOutputType', + 'GraphQLInterfaceType', 'GraphQLType', 'GraphQLTypeResolver', + 'GraphQLUnionType', 'GraphQLWrappingType', + 'ResponsePath', 'Thunk'] + + +class GraphQLType: + """Base class for all GraphQL types""" + + # Note: We don't use slots for GraphQLType objects because memory + # considerations are not really important for the schema definition + # and it would make caching properties slower or more complicated. + + +# There are predicates for each kind of GraphQL type. + +def is_type(type_: Any) -> bool: + return isinstance(type_, GraphQLType) + + +def assert_type(type_: Any) -> GraphQLType: + if not is_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL type.') + return type_ + + +# These types wrap and modify other types + +GT = TypeVar('GT', bound=GraphQLType) + + +class GraphQLWrappingType(GraphQLType, Generic[GT]): + """Base class for all GraphQL wrapping types""" + + of_type: GT + + def __init__(self, type_: GT) -> None: + if not is_type(type_): + raise TypeError( + 'Can only create a wrapper for a GraphQLType, but got:' + f' {type_}.') + self.of_type = type_ + + +def is_wrapping_type(type_: Any) -> bool: + return isinstance(type_, GraphQLWrappingType) + + +def assert_wrapping_type(type_: Any) -> GraphQLWrappingType: + if not is_wrapping_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL wrapping type.') + return type_ + + +# These named types do not include modifiers like List or NonNull. 
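+# For example, `Person` would be a named type, while `[Person]` (a
+# GraphQLList) and `Person!` (a GraphQLNonNull) are wrapping types around it.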
+ +class GraphQLNamedType(GraphQLType): + """Base class for all GraphQL named types""" + + name: str + description: Optional[str] + ast_node: Optional[TypeDefinitionNode] + extension_ast_nodes: Optional[Tuple[TypeExtensionNode]] + + def __init__(self, name: str, description: str=None, + ast_node: TypeDefinitionNode=None, + extension_ast_nodes: Sequence[TypeExtensionNode]=None + ) -> None: + if not name: + raise TypeError('Must provide name.') + if not isinstance(name, str): + raise TypeError('The name must be a string.') + if description is not None and not isinstance(description, str): + raise TypeError('The description must be a string.') + if ast_node and not isinstance(ast_node, TypeDefinitionNode): + raise TypeError( + f'{name} AST node must be a TypeDefinitionNode.') + if extension_ast_nodes: + if isinstance(extension_ast_nodes, list): + extension_ast_nodes = tuple(extension_ast_nodes) + if not isinstance(extension_ast_nodes, tuple): + raise TypeError( + f'{name} extension AST nodes must be a list/tuple.') + if not all(isinstance(node, TypeExtensionNode) + for node in extension_ast_nodes): + raise TypeError( + f'{name} extension AST nodes must be TypeExtensionNode.') + self.name = name + self.description = description + self.ast_node = ast_node + self.extension_ast_nodes = extension_ast_nodes # type: ignore + + def __str__(self): + return self.name + + def __repr__(self): + return f'<{self.__class__.__name__}({self})>' + + +def is_named_type(type_: Any) -> bool: + return isinstance(type_, GraphQLNamedType) + + +def assert_named_type(type_: Any) -> GraphQLNamedType: + if not is_named_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL named type.') + return type_ + + +@overload +def get_named_type(type_: None) -> None: + ... + + +@overload # noqa: F811 (pycqa/flake8#423) +def get_named_type(type_: GraphQLType) -> GraphQLNamedType: + ... + + +def get_named_type(type_): # noqa: F811 + """Unwrap possible wrapping type""" + if type_: + unwrapped_type = type_ + while is_wrapping_type(unwrapped_type): + unwrapped_type = cast(GraphQLWrappingType, unwrapped_type) + unwrapped_type = unwrapped_type.of_type + return cast(GraphQLNamedType, unwrapped_type) + return None + + +def resolve_thunk(thunk: Any) -> Any: + """Resolve the given thunk. + + Used while defining GraphQL types to allow for circular references in + otherwise immutable type definitions. + """ + return thunk() if callable(thunk) else thunk + + +def default_value_parser(value: Any) -> Any: + return value + + +# Unfortunately these types cannot be specified any better in Python: +GraphQLScalarSerializer = Callable +GraphQLScalarValueParser = Callable +GraphQLScalarLiteralParser = Callable + + +class GraphQLScalarType(GraphQLNamedType): + """Scalar Type Definition + + The leaf values of any request and input values to arguments are + Scalars (or Enums) and are defined with a name and a series of functions + used to parse input from ast or variables and to ensure validity. + + If a type's serialize function does not return a value (i.e. it returns + `None`), then no error will be included in the response. + + Example: + + def serialize_odd(value): + if value % 2 == 1: + return value + + odd_type = GraphQLScalarType('Odd', serialize=serialize_odd) + + """ + + # Serializes an internal value to include in a response. + serialize: GraphQLScalarSerializer + # Parses an externally provided value to use as an input. + parseValue: GraphQLScalarValueParser + # Parses an externally provided literal value to use as an input. 
+ # Takes a dictionary of variables as an optional second argument. + parseLiteral: GraphQLScalarLiteralParser + + ast_node: Optional[ScalarTypeDefinitionNode] + extension_ast_nodes: Optional[Tuple[ScalarTypeExtensionNode]] + + def __init__(self, name: str, serialize: GraphQLScalarSerializer, + description: str=None, + parse_value: GraphQLScalarValueParser=None, + parse_literal: GraphQLScalarLiteralParser=None, + ast_node: ScalarTypeDefinitionNode=None, + extension_ast_nodes: Sequence[ScalarTypeExtensionNode]=None + ) -> None: + super().__init__( + name=name, description=description, + ast_node=ast_node, extension_ast_nodes=extension_ast_nodes) + if not callable(serialize): + raise TypeError( + f"{name} must provide 'serialize' function." + ' If this custom Scalar is also used as an input type,' + " ensure 'parse_value' and 'parse_literal' functions" + ' are also provided.') + if parse_value is not None or parse_literal is not None: + if not callable(parse_value) or not callable(parse_literal): + raise TypeError( + f'{name} must provide' + " both 'parse_value' and 'parse_literal' functions.") + if ast_node and not isinstance(ast_node, ScalarTypeDefinitionNode): + raise TypeError( + f'{name} AST node must be a ScalarTypeDefinitionNode.') + if extension_ast_nodes and not all( + isinstance(node, ScalarTypeExtensionNode) + for node in extension_ast_nodes): + raise TypeError( + f'{name} extension AST nodes' + ' must be ScalarTypeExtensionNode.') + self.serialize = serialize # type: ignore + self.parse_value = parse_value or default_value_parser + self.parse_literal = parse_literal or value_from_ast_untyped + + +def is_scalar_type(type_: Any) -> bool: + return isinstance(type_, GraphQLScalarType) + + +def assert_scalar_type(type_: Any) -> GraphQLScalarType: + if not is_scalar_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL Scalar type.') + return type_ + + +GraphQLArgumentMap = Dict[str, 'GraphQLArgument'] + + +class GraphQLField: + """Definition of a GraphQL field""" + + type: 'GraphQLOutputType' + args: Dict[str, 'GraphQLArgument'] + resolve: Optional['GraphQLFieldResolver'] + subscribe: Optional['GraphQLFieldResolver'] + description: Optional[str] + deprecation_reason: Optional[str] + ast_node: Optional[FieldDefinitionNode] + + def __init__(self, type_: 'GraphQLOutputType', + args: GraphQLArgumentMap=None, + resolve: 'GraphQLFieldResolver'=None, + subscribe: 'GraphQLFieldResolver'=None, + description: str=None, deprecation_reason: str=None, + ast_node: FieldDefinitionNode=None) -> None: + if not is_output_type(type_): + raise TypeError('Field type must be an output type.') + if args is None: + args = {} + elif not isinstance(args, dict): + raise TypeError( + 'Field args must be a dict with argument names as keys.') + elif not all(isinstance(value, GraphQLArgument) or is_input_type(value) + for value in args.values()): + raise TypeError( + 'Field args must be GraphQLArgument or input type objects.') + else: + args = {name: cast(GraphQLArgument, value) + if isinstance(value, GraphQLArgument) + else GraphQLArgument(cast(GraphQLInputType, value)) + for name, value in args.items()} + if resolve is not None and not callable(resolve): + raise TypeError( + 'Field resolver must be a function if provided, ' + f' but got: {resolve!r}.') + if description is not None and not isinstance(description, str): + raise TypeError('The description must be a string.') + if deprecation_reason is not None and not isinstance( + deprecation_reason, str): + raise TypeError('The deprecation reason must 
be a string.') + if ast_node and not isinstance(ast_node, FieldDefinitionNode): + raise TypeError('Field AST node must be a FieldDefinitionNode.') + self.type = type_ + self.args = args or {} + self.resolve = resolve + self.subscribe = subscribe + self.deprecation_reason = deprecation_reason + self.description = description + self.ast_node = ast_node + + def __eq__(self, other): + return (self is other or ( + isinstance(other, GraphQLField) and + self.type == other.type and + self.args == other.args and + self.resolve == other.resolve and + self.description == other.description and + self.deprecation_reason == other.deprecation_reason)) + + @property + def is_deprecated(self) -> bool: + return bool(self.deprecation_reason) + + +class ResponsePath(NamedTuple): + + prev: Any # Optional['ResponsePath'] (python/mypy/issues/731)) + key: Union[str, int] + + +class GraphQLResolveInfo(NamedTuple): + """Collection of information passed to the resolvers. + + This is always passed as the first argument to the resolvers. + + Note that contrary to the JavaScript implementation, the context + (commonly used to represent an authenticated user, or request-specific + caches) is included here and not passed as an additional argument. + """ + + field_name: str + field_nodes: List[FieldNode] + return_type: 'GraphQLOutputType' + parent_type: 'GraphQLObjectType' + path: ResponsePath + schema: 'GraphQLSchema' + fragments: Dict[str, FragmentDefinitionNode] + root_value: Any + operation: OperationDefinitionNode + variable_values: Dict[str, Any] + context: Any + + +# Note: Contrary to the Javascript implementation of GraphQLFieldResolver, +# the context is passed as part of the GraphQLResolveInfo and any arguments +# are passed individually as keyword arguments. +GraphQLFieldResolverWithoutArgs = Callable[[Any, GraphQLResolveInfo], Any] +# Unfortunately there is currently no syntax to indicate optional or keyword +# arguments in Python, so we also allow any other Callable as a workaround: +GraphQLFieldResolver = Callable[..., Any] + +# Note: Contrary to the Javascript implementation of GraphQLTypeResolver, +# the context is passed as part of the GraphQLResolveInfo: +GraphQLTypeResolver = Callable[ + [Any, GraphQLResolveInfo], MaybeAwaitable[Union['GraphQLObjectType', str]]] + +# Note: Contrary to the Javascript implementation of GraphQLIsTypeOfFn, +# the context is passed as part of the GraphQLResolveInfo: +GraphQLIsTypeOfFn = Callable[ + [Any, GraphQLResolveInfo], MaybeAwaitable[bool]] + + +class GraphQLArgument: + """Definition of a GraphQL argument""" + + type: 'GraphQLInputType' + default_value: Any + description: Optional[str] + ast_node: Optional[InputValueDefinitionNode] + + def __init__(self, type_: 'GraphQLInputType', default_value: Any=INVALID, + description: str=None, + ast_node: InputValueDefinitionNode=None) -> None: + if not is_input_type(type_): + raise TypeError(f'Argument type must be a GraphQL input type.') + if description is not None and not isinstance(description, str): + raise TypeError('The description must be a string.') + if ast_node and not isinstance(ast_node, InputValueDefinitionNode): + raise TypeError( + 'Argument AST node must be an InputValueDefinitionNode.') + self.type = type_ + self.default_value = default_value + self.description = description + self.ast_node = ast_node + + def __eq__(self, other): + return (self is other or ( + isinstance(other, GraphQLArgument) and + self.type == other.type and + self.default_value == other.default_value and + self.description == 
other.description)) + + +T = TypeVar('T') +Thunk = Union[Callable[[], T], T] + +GraphQLFieldMap = Dict[str, GraphQLField] +GraphQLInterfaceList = Sequence['GraphQLInterfaceType'] + + +class GraphQLObjectType(GraphQLNamedType): + """Object Type Definition + + Almost all of the GraphQL types you define will be object types. + Object types have a name, but most importantly describe their fields. + + Example:: + + AddressType = GraphQLObjectType('Address', { + 'street': GraphQLField(GraphQLString), + 'number': GraphQLField(GraphQLInt), + 'formatted': GraphQLField(GraphQLString, + lambda obj, info, **args: f'{obj.number} {obj.street}') + }) + + When two types need to refer to each other, or a type needs to refer to + itself in a field, you can use a lambda function with no arguments (a + so-called "thunk") to supply the fields lazily. + + Example:: + + PersonType = GraphQLObjectType('Person', lambda: { + 'name': GraphQLField(GraphQLString), + 'bestFriend': GraphQLField(PersonType) + }) + + """ + + is_type_of: Optional[GraphQLIsTypeOfFn] + ast_node: Optional[ObjectTypeDefinitionNode] + extension_ast_nodes: Optional[Tuple[ObjectTypeExtensionNode]] + + def __init__(self, name: str, + fields: Thunk[GraphQLFieldMap], + interfaces: Thunk[GraphQLInterfaceList]=None, + is_type_of: GraphQLIsTypeOfFn=None, description: str=None, + ast_node: ObjectTypeDefinitionNode=None, + extension_ast_nodes: Sequence[ObjectTypeExtensionNode]=None + ) -> None: + super().__init__( + name=name, description=description, + ast_node=ast_node, extension_ast_nodes=extension_ast_nodes) + if is_type_of is not None and not callable(is_type_of): + raise TypeError( + f"{name} must provide 'is_type_of' as a function," + f' but got: {is_type_of!r}.') + if ast_node and not isinstance(ast_node, ObjectTypeDefinitionNode): + raise TypeError( + f'{name} AST node must be an ObjectTypeDefinitionNode.') + if extension_ast_nodes and not all( + isinstance(node, ObjectTypeExtensionNode) + for node in extension_ast_nodes): + raise TypeError( + f'{name} extension AST nodes' + ' must be ObjectTypeExtensionNodes.') + self._fields = fields + self._interfaces = interfaces + self.is_type_of = is_type_of + + @cached_property + def fields(self) -> GraphQLFieldMap: + """Get provided fields, wrapping them as GraphQLFields if needed.""" + try: + fields = resolve_thunk(self._fields) + except GraphQLError: + raise + except Exception as error: + raise TypeError(f'{self.name} fields cannot be resolved: {error}') + if not isinstance(fields, dict) or not all( + isinstance(key, str) for key in fields): + raise TypeError( + f'{self.name} fields must be a dict with field names as keys' + ' or a function which returns such an object.') + if not all(isinstance(value, GraphQLField) or is_output_type(value) + for value in fields.values()): + raise TypeError( + f'{self.name} fields must be' + ' GraphQLField or output type objects.') + return {name: value if isinstance(value, GraphQLField) + else GraphQLField(value) + for name, value in fields.items()} + + @cached_property + def interfaces(self) -> GraphQLInterfaceList: + """Get provided interfaces.""" + try: + interfaces = resolve_thunk(self._interfaces) + except GraphQLError: + raise + except Exception as error: + raise TypeError( + f'{self.name} interfaces cannot be resolved: {error}') + if interfaces is None: + interfaces = [] + if not isinstance(interfaces, (list, tuple)): + raise TypeError( + f'{self.name} interfaces must be a list/tuple' + ' or a function which returns a list/tuple.') + if not 
all(isinstance(value, GraphQLInterfaceType) + for value in interfaces): + raise TypeError( + f'{self.name} interfaces must be GraphQLInterface objects.') + return interfaces[:] + + +def is_object_type(type_: Any) -> bool: + return isinstance(type_, GraphQLObjectType) + + +def assert_object_type(type_: Any) -> GraphQLObjectType: + if not is_object_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL Object type.') + return type_ + + +class GraphQLInterfaceType(GraphQLNamedType): + """Interface Type Definition + + When a field can return one of a heterogeneous set of types, a Interface + type is used to describe what types are possible, what fields are in common + across all types, as well as a function to determine which type is actually + used when the field is resolved. + + Example:: + + EntityType = GraphQLInterfaceType('Entity', { + 'name': GraphQLField(GraphQLString), + }) + """ + + resolve_type: Optional[GraphQLTypeResolver] + ast_node: Optional[InterfaceTypeDefinitionNode] + extension_ast_nodes: Optional[Tuple[InterfaceTypeExtensionNode]] + + def __init__(self, name: str, fields: Thunk[GraphQLFieldMap]=None, + resolve_type: GraphQLTypeResolver=None, + description: str=None, + ast_node: InterfaceTypeDefinitionNode=None, + extension_ast_nodes: Sequence[InterfaceTypeExtensionNode]=None + ) -> None: + super().__init__( + name=name, description=description, + ast_node=ast_node, extension_ast_nodes=extension_ast_nodes) + if resolve_type is not None and not callable(resolve_type): + raise TypeError( + f"{name} must provide 'resolve_type' as a function," + f' but got: {resolve_type!r}.') + if ast_node and not isinstance( + ast_node, InterfaceTypeDefinitionNode): + raise TypeError( + f'{name} AST node must be an InterfaceTypeDefinitionNode.') + if extension_ast_nodes and not all(isinstance( + node, InterfaceTypeExtensionNode) + for node in extension_ast_nodes): + raise TypeError( + f'{name} extension AST nodes' + ' must be InterfaceTypeExtensionNodes.') + self._fields = fields + self.resolve_type = resolve_type + self.description = description + + @cached_property + def fields(self) -> GraphQLFieldMap: + """Get provided fields, wrapping them as GraphQLFields if needed.""" + try: + fields = resolve_thunk(self._fields) + except GraphQLError: + raise + except Exception as error: + raise TypeError(f'{self.name} fields cannot be resolved: {error}') + if not isinstance(fields, dict) or not all( + isinstance(key, str) for key in fields): + raise TypeError( + f'{self.name} fields must be a dict with field names as keys' + ' or a function which returns such an object.') + if not all(isinstance(value, GraphQLField) or is_output_type(value) + for value in fields.values()): + raise TypeError( + f'{self.name} fields must be' + ' GraphQLField or output type objects.') + return {name: value if isinstance(value, GraphQLField) + else GraphQLField(value) + for name, value in fields.items()} + + +def is_interface_type(type_: Any) -> bool: + return isinstance(type_, GraphQLInterfaceType) + + +def assert_interface_type(type_: Any) -> GraphQLInterfaceType: + if not is_interface_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL Interface type.') + return type_ + + +GraphQLTypeList = Sequence[GraphQLObjectType] + + +class GraphQLUnionType(GraphQLNamedType): + """Union Type Definition + + When a field can return one of a heterogeneous set of types, a Union type + is used to describe what types are possible as well as providing a function + to determine which type is actually used when the 
field is resolved.
+
+    Example::
+
+        def resolve_type(obj, _info):
+            if isinstance(obj, Dog):
+                return DogType
+            if isinstance(obj, Cat):
+                return CatType
+
+        PetType = GraphQLUnionType('Pet', [DogType, CatType], resolve_type)
+    """
+
+    resolve_type: Optional[GraphQLTypeResolver]
+    ast_node: Optional[UnionTypeDefinitionNode]
+    extension_ast_nodes: Optional[Tuple[UnionTypeExtensionNode]]
+
+    def __init__(self, name: str, types: Thunk[GraphQLTypeList],
+                 resolve_type: GraphQLTypeResolver=None,
+                 description: str=None,
+                 ast_node: UnionTypeDefinitionNode=None,
+                 extension_ast_nodes: Sequence[UnionTypeExtensionNode]=None
+                 ) -> None:
+        super().__init__(
+            name=name, description=description,
+            ast_node=ast_node, extension_ast_nodes=extension_ast_nodes)
+        if resolve_type is not None and not callable(resolve_type):
+            raise TypeError(
+                f"{name} must provide 'resolve_type' as a function,"
+                f' but got: {resolve_type!r}.')
+        if ast_node and not isinstance(ast_node, UnionTypeDefinitionNode):
+            raise TypeError(
+                f'{name} AST node must be a UnionTypeDefinitionNode.')
+        if extension_ast_nodes and not all(
+                isinstance(node, UnionTypeExtensionNode)
+                for node in extension_ast_nodes):
+            raise TypeError(
+                f'{name} extension AST nodes must be UnionTypeExtensionNode.')
+        self._types = types
+        self.resolve_type = resolve_type
+
+    @cached_property
+    def types(self) -> GraphQLTypeList:
+        """Get provided types."""
+        try:
+            types = resolve_thunk(self._types)
+        except GraphQLError:
+            raise
+        except Exception as error:
+            raise TypeError(f'{self.name} types cannot be resolved: {error}')
+        if types is None:
+            types = []
+        if not isinstance(types, (list, tuple)):
+            raise TypeError(
+                f'{self.name} types must be a list/tuple'
+                ' or a function which returns a list/tuple.')
+        if not all(isinstance(value, GraphQLObjectType) for value in types):
+            raise TypeError(
+                f'{self.name} types must be GraphQLObjectType objects.')
+        return types[:]
+
+
+def is_union_type(type_: Any) -> bool:
+    return isinstance(type_, GraphQLUnionType)
+
+
+def assert_union_type(type_: Any) -> GraphQLUnionType:
+    if not is_union_type(type_):
+        raise TypeError(f'Expected {type_} to be a GraphQL Union type.')
+    return type_
+
+
+GraphQLEnumValueMap = Dict[str, 'GraphQLEnumValue']
+
+
+class GraphQLEnumType(GraphQLNamedType):
+    """Enum Type Definition
+
+    Some leaf values of requests and input values are Enums. GraphQL
+    serializes Enum values as strings; internally, however, Enums can be
+    represented by any kind of type, often integers. They can also be
+    provided as a Python Enum.
+
+    Example::
+
+        RGBType = GraphQLEnumType('RGB', {
+            'RED': 0,
+            'GREEN': 1,
+            'BLUE': 2
+        })
+
+    Example using a Python Enum::
+
+        class RGBEnum(enum.Enum):
+            RED = 0
+            GREEN = 1
+            BLUE = 2
+
+        RGBType = GraphQLEnumType('RGB', RGBEnum)
+
+    Instead of raw values, you can also specify GraphQLEnumValue objects
+    with more detail like description or deprecation information.
+
+    Note: If a value is not provided in a definition, the name of the enum
+    value will be used as its internal value when the value is serialized.
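+
+    Example with GraphQLEnumValue objects (a sketch; the descriptions and
+    deprecation reason are arbitrary)::
+
+        RGBType = GraphQLEnumType('RGB', {
+            'RED': GraphQLEnumValue(0, description='The color red'),
+            'GREEN': GraphQLEnumValue(1, description='The color green'),
+            'BLUE': GraphQLEnumValue(2, deprecation_reason='Use GREEN')
+        })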
+ """ + + values: GraphQLEnumValueMap + ast_node: Optional[EnumTypeDefinitionNode] + extension_ast_nodes: Optional[Tuple[EnumTypeExtensionNode]] + + def __init__(self, name: str, + values: Union[GraphQLEnumValueMap, + Dict[str, Any], Type[Enum]], + description: str=None, + ast_node: EnumTypeDefinitionNode=None, + extension_ast_nodes: Sequence[EnumTypeExtensionNode]=None + ) -> None: + super().__init__( + name=name, description=description, + ast_node=ast_node, extension_ast_nodes=extension_ast_nodes) + try: # check for enum + values = cast(Enum, values).__members__ # type: ignore + except AttributeError: + if not isinstance(values, dict) or not all( + isinstance(name, str) for name in values): + try: + # noinspection PyTypeChecker + values = dict(values) # type: ignore + except (TypeError, ValueError): + raise TypeError( + f'{name} values must be an Enum or a dict' + ' with value names as keys.') + values = cast(Dict, values) + else: + values = cast(Dict, values) + values = {key: value.value for key, value in values.items()} + values = {key: value if isinstance(value, GraphQLEnumValue) else + GraphQLEnumValue(value) for key, value in values.items()} + if ast_node and not isinstance(ast_node, EnumTypeDefinitionNode): + raise TypeError( + f'{name} AST node must be an EnumTypeDefinitionNode.') + if extension_ast_nodes and not all( + isinstance(node, EnumTypeExtensionNode) + for node in extension_ast_nodes): + raise TypeError( + f'{name} extension AST nodes must be EnumTypeExtensionNode.') + self.values = values + + @cached_property + def _value_lookup(self) -> Dict[Any, str]: + # use first value or name as lookup + lookup: Dict[Any, str] = {} + for name, enum_value in self.values.items(): + value = enum_value.value + if value is None: + value = name + try: + if value not in lookup: + lookup[value] = name + except TypeError: + pass # ignore unhashable values + return lookup + + def serialize(self, value: Any) -> Union[str, None, InvalidType]: + try: + return self._value_lookup.get(value, INVALID) + except TypeError: # unhashable value + for enum_name, enum_value in self.values.items(): + if enum_value.value == value: + return enum_name + return INVALID + + def parse_value(self, value: str) -> Any: + if isinstance(value, str): + try: + enum_value = self.values[value] + except KeyError: + return INVALID + if enum_value.value is None: + return value + return enum_value.value + return INVALID + + def parse_literal( + self, value_node: ValueNode, + _variables: Dict[str, Any]=None) -> Any: + # Note: variables will be resolved before calling this method. 
+ if isinstance(value_node, EnumValueNode): + value = value_node.value + try: + enum_value = self.values[value] + except KeyError: + return INVALID + if enum_value.value is None: + return value + return enum_value.value + return INVALID + + +def is_enum_type(type_: Any) -> bool: + return isinstance(type_, GraphQLEnumType) + + +def assert_enum_type(type_: Any) -> GraphQLEnumType: + if not is_enum_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL Enum type.') + return type_ + + +class GraphQLEnumValue: + + value: Any + description: Optional[str] + deprecation_reason: Optional[str] + ast_node: Optional[EnumValueDefinitionNode] + + def __init__(self, value: Any=None, description: str=None, + deprecation_reason: str=None, + ast_node: EnumValueDefinitionNode=None) -> None: + if description is not None and not isinstance(description, str): + raise TypeError('The description must be a string.') + if deprecation_reason is not None and not isinstance( + deprecation_reason, str): + raise TypeError('The deprecation reason must be a string.') + if ast_node and not isinstance(ast_node, EnumValueDefinitionNode): + raise TypeError( + 'AST node must be an EnumValueDefinitionNode.') + self.value = value + self.description = description + self.deprecation_reason = deprecation_reason + self.ast_node = ast_node + + def __eq__(self, other): + return (self is other or ( + isinstance(other, GraphQLEnumValue) and + self.value == other.value and + self.description == other.description and + self.deprecation_reason == other.deprecation_reason)) + + @property + def is_deprecated(self) -> bool: + return bool(self.deprecation_reason) + + +GraphQLInputFieldMap = Dict[str, 'GraphQLInputField'] + + +class GraphQLInputObjectType(GraphQLNamedType): + """Input Object Type Definition + + An input object defines a structured collection of fields which may be + supplied to a field argument. 
+ + Using `NonNull` will ensure that a value must be provided by the query + + Example:: + + NonNullFloat = GraphQLNonNull(GraphQLFloat()) + + class GeoPoint(GraphQLInputObjectType): + name = 'GeoPoint' + fields = { + 'lat': GraphQLInputField(NonNullFloat), + 'lon': GraphQLInputField(NonNullFloat), + 'alt': GraphQLInputField( + GraphQLFloat(), default_value=0) + } + """ + + ast_node: Optional[InputObjectTypeDefinitionNode] + extension_ast_nodes: Optional[Tuple[InputObjectTypeExtensionNode]] + + def __init__(self, name: str, fields: Thunk[GraphQLInputFieldMap], + description: str=None, + ast_node: InputObjectTypeDefinitionNode=None, + extension_ast_nodes: Sequence[ + InputObjectTypeExtensionNode]=None) -> None: + super().__init__( + name=name, description=description, + ast_node=ast_node, extension_ast_nodes=extension_ast_nodes) + if ast_node and not isinstance( + ast_node, InputObjectTypeDefinitionNode): + raise TypeError( + f'{name} AST node must be an InputObjectTypeDefinitionNode.') + if extension_ast_nodes and not all( + isinstance(node, InputObjectTypeExtensionNode) + for node in extension_ast_nodes): + raise TypeError( + f'{name} extension AST nodes' + ' must be InputObjectTypeExtensionNode.') + self._fields = fields + + @cached_property + def fields(self) -> GraphQLInputFieldMap: + """Get provided fields, wrap them as GraphQLInputField if needed.""" + try: + fields = resolve_thunk(self._fields) + except GraphQLError: + raise + except Exception as error: + raise TypeError(f'{self.name} fields cannot be resolved: {error}') + if not isinstance(fields, dict) or not all( + isinstance(key, str) for key in fields): + raise TypeError( + f'{self.name} fields must be a dict with field names as keys' + ' or a function which returns such an object.') + if not all(isinstance(value, GraphQLInputField) or is_input_type(value) + for value in fields.values()): + raise TypeError( + f'{self.name} fields must be' + ' GraphQLInputField or input type objects.') + return {name: value if isinstance(value, GraphQLInputField) + else GraphQLInputField(value) + for name, value in fields.items()} + + +def is_input_object_type(type_: Any) -> bool: + return isinstance(type_, GraphQLInputObjectType) + + +def assert_input_object_type(type_: Any) -> GraphQLInputObjectType: + if not is_input_object_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL Input Object type.') + return type_ + + +class GraphQLInputField: + """Definition of a GraphQL input field""" + + type: 'GraphQLInputType' + description: Optional[str] + default_value: Any + ast_node: Optional[InputValueDefinitionNode] + + def __init__(self, type_: 'GraphQLInputType', description: str=None, + default_value: Any=INVALID, + ast_node: InputValueDefinitionNode=None) -> None: + if not is_input_type(type_): + raise TypeError(f'Input field type must be a GraphQL input type.') + if ast_node and not isinstance(ast_node, InputValueDefinitionNode): + raise TypeError( + 'Input field AST node must be an InputValueDefinitionNode.') + self.type = type_ + self.default_value = default_value + self.description = description + self.ast_node = ast_node + + def __eq__(self, other): + return (self is other or ( + isinstance(other, GraphQLInputField) and + self.type == other.type and + self.description == other.description)) + + +# Wrapper types + +class GraphQLList(Generic[GT], GraphQLWrappingType[GT]): + """List Type Wrapper + + A list is a wrapping type which points to another type. 
+ Lists are often created within the context of defining the fields of + an object type. + + Example:: + + class PersonType(GraphQLObjectType): + name = 'Person' + + @property + def fields(self): + return { + 'parents': GraphQLField(GraphQLList(PersonType())), + 'children': GraphQLField(GraphQLList(PersonType())), + } + """ + + def __init__(self, type_: GT) -> None: + super().__init__(type_=type_) + + def __str__(self): + return f'[{self.of_type}]' + + +def is_list_type(type_: Any) -> bool: + return isinstance(type_, GraphQLList) + + +def assert_list_type(type_: Any) -> GraphQLList: + if not is_list_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL List type.') + return type_ + + +GNT = TypeVar('GNT', bound='GraphQLNullableType') + + +class GraphQLNonNull(GraphQLWrappingType[GNT], Generic[GNT]): + """Non-Null Type Wrapper + + A non-null is a wrapping type which points to another type. + Non-null types enforce that their values are never null and can ensure + an error is raised if this ever occurs during a request. It is useful for + fields which you can make a strong guarantee on non-nullability, + for example usually the id field of a database row will never be null. + + Example:: + + class RowType(GraphQLObjectType): + name = 'Row' + fields = { + 'id': GraphQLField(GraphQLNonNull(GraphQLString())) + } + + Note: the enforcement of non-nullability occurs within the executor. + """ + + def __init__(self, type_: GNT) -> None: + super().__init__(type_=type_) + if isinstance(type_, GraphQLNonNull): + raise TypeError( + 'Can only create NonNull of a Nullable GraphQLType but got:' + f' {type_}.') + + def __str__(self): + return f'{self.of_type}!' + + +def is_non_null_type(type_: Any) -> bool: + return isinstance(type_, GraphQLNonNull) + + +def assert_non_null_type(type_: Any) -> GraphQLNonNull: + if not is_non_null_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL Non-Null type.') + return type_ + + +# These types can all accept null as a value. + +graphql_nullable_types = ( + GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, + GraphQLUnionType, GraphQLEnumType, GraphQLInputObjectType, GraphQLList) + +GraphQLNullableType = Union[ + GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, + GraphQLUnionType, GraphQLEnumType, GraphQLInputObjectType, GraphQLList] + + +def is_nullable_type(type_: Any) -> bool: + return isinstance(type_, graphql_nullable_types) + + +def assert_nullable_type(type_: Any) -> GraphQLNullableType: + if not is_nullable_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL nullable type.') + return type_ + + +@overload +def get_nullable_type(type_: None) -> None: + ... + + +@overload # noqa: F811 (pycqa/flake8#423) +def get_nullable_type(type_: GraphQLNullableType) -> GraphQLNullableType: + ... + + +@overload # noqa: F811 +def get_nullable_type(type_: GraphQLNonNull) -> GraphQLNullableType: + ... + + +def get_nullable_type(type_): # noqa: F811 + """Unwrap possible non-null type""" + if is_non_null_type(type_): + type_ = cast(GraphQLNonNull, type_) + type_ = type_.of_type + return cast(Optional[GraphQLNullableType], type_) + + +# These types may be used as input types for arguments and directives. 
+ +graphql_input_types = ( + GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType) + +GraphQLInputType = Union[ + GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType, + GraphQLWrappingType] + + +def is_input_type(type_: Any) -> bool: + return isinstance(type_, graphql_input_types) or (isinstance( + type_, GraphQLWrappingType) and is_input_type(type_.of_type)) + + +def assert_input_type(type_: Any) -> GraphQLInputType: + if not is_input_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL input type.') + return type_ + + +# These types may be used as output types as the result of fields. + +graphql_output_types = ( + GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, + GraphQLUnionType, GraphQLEnumType) + +GraphQLOutputType = Union[ + GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, + GraphQLUnionType, GraphQLEnumType, GraphQLWrappingType] + + +def is_output_type(type_: Any) -> bool: + return isinstance(type_, graphql_output_types) or (isinstance( + type_, GraphQLWrappingType) and is_output_type(type_.of_type)) + + +def assert_output_type(type_: Any) -> GraphQLOutputType: + if not is_output_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL output type.') + return type_ + + +# These types may describe types which may be leaf values. + +graphql_leaf_types = (GraphQLScalarType, GraphQLEnumType) + +GraphQLLeafType = Union[GraphQLScalarType, GraphQLEnumType] + + +def is_leaf_type(type_: Any) -> bool: + return isinstance(type_, graphql_leaf_types) + + +def assert_leaf_type(type_: Any) -> GraphQLLeafType: + if not is_leaf_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL leaf type.') + return type_ + + +# These types may describe the parent context of a selection set. + +graphql_composite_types = ( + GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType) + +GraphQLCompositeType = Union[ + GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType] + + +def is_composite_type(type_: Any) -> bool: + return isinstance(type_, graphql_composite_types) + + +def assert_composite_type(type_: Any) -> GraphQLType: + if not is_composite_type(type_): + raise TypeError(f'Expected {type_} to be a GraphQL composite type.') + return type_ + + +# These types may describe abstract types. 
+
+graphql_abstract_types = (GraphQLInterfaceType, GraphQLUnionType)
+
+GraphQLAbstractType = Union[GraphQLInterfaceType, GraphQLUnionType]
+
+
+def is_abstract_type(type_: Any) -> bool:
+    return isinstance(type_, graphql_abstract_types)
+
+
+def assert_abstract_type(type_: Any) -> GraphQLAbstractType:
+    if not is_abstract_type(type_):
+        raise TypeError(f'Expected {type_} to be a GraphQL abstract type.')
+    return type_
diff --git a/graphql/type/directives.py b/graphql/type/directives.py
new file mode 100644
index 00000000..101c364b
--- /dev/null
+++ b/graphql/type/directives.py
@@ -0,0 +1,135 @@
+from typing import Any, Dict, Sequence, cast
+
+from ..language import ast, DirectiveLocation
+from .definition import (
+    GraphQLArgument, GraphQLInputType, GraphQLNonNull, is_input_type)
+from .scalars import GraphQLBoolean, GraphQLString
+
+__all__ = [
+    'is_directive', 'is_specified_directive', 'specified_directives',
+    'GraphQLDirective', 'GraphQLIncludeDirective', 'GraphQLSkipDirective',
+    'GraphQLDeprecatedDirective',
+    'DirectiveLocation', 'DEFAULT_DEPRECATION_REASON']
+
+
+def is_directive(directive: Any) -> bool:
+    """Test if the given value is a GraphQL directive."""
+    return isinstance(directive, GraphQLDirective)
+
+
+class GraphQLDirective:
+    """GraphQL Directive
+
+    Directives are used by the GraphQL runtime as a way of modifying execution
+    behavior. Type system creators will usually not create these directly.
+    """
+
+    def __init__(self, name: str,
+                 locations: Sequence[DirectiveLocation],
+                 args: Dict[str, GraphQLArgument]=None,
+                 description: str=None,
+                 ast_node: ast.DirectiveDefinitionNode=None) -> None:
+        if not name:
+            raise TypeError('Directive must be named.')
+        elif not isinstance(name, str):
+            raise TypeError('The directive name must be a string.')
+        if not isinstance(locations, (list, tuple)):
+            raise TypeError(f'{name} locations must be a list/tuple.')
+        if not all(isinstance(value, DirectiveLocation)
+                   for value in locations):
+            try:
+                locations = [
+                    value if isinstance(value, DirectiveLocation)
+                    else DirectiveLocation[value] for value in locations]
+            except (KeyError, TypeError):
+                raise TypeError(
+                    f'{name} locations must be DirectiveLocation objects.')
+        if args is None:
+            args = {}
+        elif not isinstance(args, dict) or not all(
+                isinstance(key, str) for key in args):
+            raise TypeError(
+                f'{name} args must be a dict with argument names as keys.')
+        elif not all(isinstance(value, GraphQLArgument) or is_input_type(value)
+                     for value in args.values()):
+            raise TypeError(
+                f'{name} args must be GraphQLArgument or input type objects.')
+        else:
+            # use a separate variable for the argument names so that `name`
+            # still refers to the directive name in the error messages below
+            args = {arg_name: cast(GraphQLArgument, value)
+                    if isinstance(value, GraphQLArgument)
+                    else GraphQLArgument(cast(GraphQLInputType, value))
+                    for arg_name, value in args.items()}
+        if description is not None and not isinstance(description, str):
+            raise TypeError(f'{name} description must be a string.')
+        if ast_node and not isinstance(ast_node, ast.DirectiveDefinitionNode):
+            raise TypeError(
+                f'{name} AST node must be a DirectiveDefinitionNode.')
+        self.name = name
+        self.locations = locations
+        self.args = args
+        self.description = description
+        self.ast_node = ast_node
+
+    def __str__(self):
+        return f'@{self.name}'
+
+    def __repr__(self):
+        return f'<{self.__class__.__name__}({self})>'
+
+
+# Used to conditionally include fields or fragments.
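+# For example, a selection like `user @include(if: $withUser) { name }` is
+# only executed when the `$withUser` variable is true.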
+GraphQLIncludeDirective = GraphQLDirective( + name='include', + locations=[ + DirectiveLocation.FIELD, + DirectiveLocation.FRAGMENT_SPREAD, + DirectiveLocation.INLINE_FRAGMENT], + args={'if': GraphQLArgument( + GraphQLNonNull(GraphQLBoolean), + description='Included when true.')}, + description='Directs the executor to include this field or fragment' + ' only when the `if` argument is true.') + + +# Used to conditionally skip (exclude) fields or fragments: +GraphQLSkipDirective = GraphQLDirective( + name='skip', + locations=[ + DirectiveLocation.FIELD, + DirectiveLocation.FRAGMENT_SPREAD, + DirectiveLocation.INLINE_FRAGMENT], + args={'if': GraphQLArgument( + GraphQLNonNull(GraphQLBoolean), + description='Skipped when true.')}, + description='Directs the executor to skip this field or fragment' + ' when the `if` argument is true.') + + +# Constant string used for default reason for a deprecation: +DEFAULT_DEPRECATION_REASON = 'No longer supported' + +# Used to declare element of a GraphQL schema as deprecated: +GraphQLDeprecatedDirective = GraphQLDirective( + name='deprecated', + locations=[DirectiveLocation.FIELD_DEFINITION, DirectiveLocation.ENUM_VALUE], + args={'reason': GraphQLArgument( + GraphQLString, + description='Explains why this element was deprecated,' + ' usually also including a suggestion for how to access' + ' supported similar data. Formatted in [Markdown]' + '(https://daringfireball.net/projects/markdown/).', + default_value=DEFAULT_DEPRECATION_REASON)}, + description='Marks an element of a GraphQL schema as no longer supported.') + + +# The full list of specified directives. +specified_directives = ( + GraphQLIncludeDirective, + GraphQLSkipDirective, + GraphQLDeprecatedDirective) + + +def is_specified_directive(directive: GraphQLDirective): + """Check whether the given directive is one of the specified directives.""" + return any(specified_directive.name == directive.name + for specified_directive in specified_directives) diff --git a/graphql/type/introspection.py b/graphql/type/introspection.py new file mode 100644 index 00000000..488f1d6d --- /dev/null +++ b/graphql/type/introspection.py @@ -0,0 +1,411 @@ +from enum import Enum +from typing import Any + +from .definition import ( + GraphQLArgument, GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLInputType, GraphQLList, GraphQLNonNull, GraphQLObjectType, + is_abstract_type, is_enum_type, is_input_object_type, + is_interface_type, is_list_type, is_named_type, is_non_null_type, + is_object_type, is_scalar_type, is_union_type) +from ..pyutils import is_invalid +from .scalars import GraphQLBoolean, GraphQLString +from ..language import DirectiveLocation + +__all__ = [ + 'SchemaMetaFieldDef', 'TypeKind', + 'TypeMetaFieldDef', 'TypeNameMetaFieldDef', + 'introspection_types', 'is_introspection_type'] + + +def print_value(value: Any, type_: GraphQLInputType) -> str: + # Since print_value needs graphql.type, it can only be imported later + from ..utilities.schema_printer import print_value + return print_value(value, type_) + + +__Schema: GraphQLObjectType = GraphQLObjectType( + name='__Schema', + description='A GraphQL Schema defines the capabilities of a GraphQL' + ' server. 
It exposes all available types and directives' + ' on the server, as well as the entry points for query,' + ' mutation, and subscription operations.', + fields=lambda: { + 'types': GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(__Type))), + resolve=lambda schema, _info: schema.type_map.values(), + description='A list of all types supported by this server.'), + 'queryType': GraphQLField( + GraphQLNonNull(__Type), + resolve=lambda schema, _info: schema.query_type, + description='The type that query operations will be rooted at.'), + 'mutationType': GraphQLField( + __Type, + resolve=lambda schema, _info: schema.mutation_type, + description='If this server supports mutation, the type that' + ' mutation operations will be rooted at.'), + 'subscriptionType': GraphQLField( + __Type, + resolve=lambda schema, _info: schema.subscription_type, + description='If this server support subscription, the type that' + ' subscription operations will be rooted at.'), + 'directives': GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(__Directive))), + resolve=lambda schema, _info: schema.directives, + description='A list of all directives supported by this server.') + }) + + +__Directive: GraphQLObjectType = GraphQLObjectType( + name='__Directive', + description='A Directive provides a way to describe alternate runtime' + ' execution and type validation behavior in a GraphQL' + ' document.\n\nIn some cases, you need to provide options' + " to alter GraphQL's execution behavior in ways field" + ' arguments will not suffice, such as conditionally including' + ' or skipping a field. Directives provide this by describing' + ' additional information to the executor.', + fields=lambda: { + # Note: The fields onOperation, onFragment and onField are deprecated + 'name': GraphQLField( + GraphQLNonNull(GraphQLString), + resolve=lambda obj, _info: obj.name), + 'description': GraphQLField( + GraphQLString, resolve=lambda obj, _info: obj.description), + 'locations': GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(__DirectiveLocation))), + resolve=lambda obj, _info: obj.locations), + 'args': GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(__InputValue))), + resolve=lambda directive, _info: (directive.args or {}).items())}) + + +__DirectiveLocation: GraphQLEnumType = GraphQLEnumType( + name='__DirectiveLocation', + description='A Directive can be adjacent to many parts of the GraphQL' + ' language, a __DirectiveLocation describes one such possible' + ' adjacencies.', + values={ + 'QUERY': GraphQLEnumValue( + DirectiveLocation.QUERY, + description='Location adjacent to a query operation.'), + 'MUTATION': GraphQLEnumValue( + DirectiveLocation.MUTATION, + description='Location adjacent to a mutation operation.'), + 'SUBSCRIPTION': GraphQLEnumValue( + DirectiveLocation.SUBSCRIPTION, + description='Location adjacent to a subscription operation.'), + 'FIELD': GraphQLEnumValue( + DirectiveLocation.FIELD, + description='Location adjacent to a field.'), + 'FRAGMENT_DEFINITION': GraphQLEnumValue( + DirectiveLocation.FRAGMENT_DEFINITION, + description='Location adjacent to a fragment definition.'), + 'FRAGMENT_SPREAD': GraphQLEnumValue( + DirectiveLocation.FRAGMENT_SPREAD, + description='Location adjacent to a fragment spread.'), + 'INLINE_FRAGMENT': GraphQLEnumValue( + DirectiveLocation.INLINE_FRAGMENT, + description='Location adjacent to an inline fragment.'), + 'SCHEMA': GraphQLEnumValue( + DirectiveLocation.SCHEMA, + description='Location adjacent to a schema definition.'), + 'SCALAR': 
GraphQLEnumValue( + DirectiveLocation.SCALAR, + description='Location adjacent to a scalar definition.'), + 'OBJECT': GraphQLEnumValue( + DirectiveLocation.OBJECT, + description='Location adjacent to an object type definition.'), + 'FIELD_DEFINITION': GraphQLEnumValue( + DirectiveLocation.FIELD_DEFINITION, + description='Location adjacent to a field definition.'), + 'ARGUMENT_DEFINITION': GraphQLEnumValue( + DirectiveLocation.ARGUMENT_DEFINITION, + description='Location adjacent to an argument definition.'), + 'INTERFACE': GraphQLEnumValue( + DirectiveLocation.INTERFACE, + description='Location adjacent to an interface definition.'), + 'UNION': GraphQLEnumValue( + DirectiveLocation.UNION, + description='Location adjacent to a union definition.'), + 'ENUM': GraphQLEnumValue( + DirectiveLocation.ENUM, + description='Location adjacent to an enum definition.'), + 'ENUM_VALUE': GraphQLEnumValue( + DirectiveLocation.ENUM_VALUE, + description='Location adjacent to an enum value definition.'), + 'INPUT_OBJECT': GraphQLEnumValue( + DirectiveLocation.INPUT_OBJECT, + description='Location adjacent to' + ' an input object type definition.'), + 'INPUT_FIELD_DEFINITION': GraphQLEnumValue( + DirectiveLocation.INPUT_FIELD_DEFINITION, + description='Location adjacent to' + ' an input object field definition.')}) + + +__Type: GraphQLObjectType = GraphQLObjectType( + name='__Type', + description='The fundamental unit of any GraphQL Schema is the type.' + ' There are many kinds of types in GraphQL as represented' + ' by the `__TypeKind` enum.\n\nDepending on the kind of a' + ' type, certain fields describe information about that type.' + ' Scalar types provide no information beyond a name and' + ' description, while Enum types provide their values.' + ' Object and Interface types provide the fields they describe.' + ' Abstract types, Union and Interface, provide the Object' + ' types possible at runtime. 
List and NonNull types compose' + ' other types.', + fields=lambda: { + 'kind': GraphQLField( + GraphQLNonNull(__TypeKind), + resolve=TypeFieldResolvers.kind), + 'name': GraphQLField( + GraphQLString, resolve=TypeFieldResolvers.name), + 'description': GraphQLField( + GraphQLString, resolve=TypeFieldResolvers.description), + 'fields': GraphQLField( + GraphQLList(GraphQLNonNull(__Field)), + args={'includeDeprecated': GraphQLArgument( + GraphQLBoolean, default_value=False)}, + resolve=TypeFieldResolvers.fields), + 'interfaces': GraphQLField( + GraphQLList(GraphQLNonNull(__Type)), + resolve=TypeFieldResolvers.interfaces), + 'possibleTypes': GraphQLField( + GraphQLList(GraphQLNonNull(__Type)), + resolve=TypeFieldResolvers.possible_types), + 'enumValues': GraphQLField( + GraphQLList(GraphQLNonNull(__EnumValue)), + args={'includeDeprecated': GraphQLArgument( + GraphQLBoolean, default_value=False)}, + resolve=TypeFieldResolvers.enum_values), + 'inputFields': GraphQLField( + GraphQLList(GraphQLNonNull(__InputValue)), + resolve=TypeFieldResolvers.input_fields), + 'ofType': GraphQLField( + __Type, resolve=TypeFieldResolvers.of_type)}) + + +class TypeFieldResolvers: + + @staticmethod + def kind(type_, _info): + if is_scalar_type(type_): + return TypeKind.SCALAR + if is_object_type(type_): + return TypeKind.OBJECT + if is_interface_type(type_): + return TypeKind.INTERFACE + if is_union_type(type_): + return TypeKind.UNION + if is_enum_type(type_): + return TypeKind.ENUM + if is_input_object_type(type_): + return TypeKind.INPUT_OBJECT + if is_list_type(type_): + return TypeKind.LIST + if is_non_null_type(type_): + return TypeKind.NON_NULL + raise TypeError(f'Unknown kind of type: {type_}') + + @staticmethod + def name(type_, _info): + return getattr(type_, 'name', None) + + @staticmethod + def description(type_, _info): + return getattr(type_, 'description', None) + + @staticmethod + def fields(type_, _info, includeDeprecated=False): + if is_object_type(type_) or is_interface_type(type_): + items = type_.fields.items() + if not includeDeprecated: + return [item for item in items + if not item[1].deprecation_reason] + return list(items) + + @staticmethod + def interfaces(type_, _info): + if is_object_type(type_): + return type_.interfaces + + @staticmethod + def possible_types(type_, info): + if is_abstract_type(type_): + return info.schema.get_possible_types(type_) + + @staticmethod + def enum_values(type_, _info, includeDeprecated=False): + if is_enum_type(type_): + items = type_.values.items() + if not includeDeprecated: + return [item for item in items + if not item[1].deprecation_reason] + return items + + @staticmethod + def input_fields(type_, _info): + if is_input_object_type(type_): + return type_.fields.items() + + @staticmethod + def of_type(type_, _info): + return getattr(type_, 'of_type', None) + + +__Field: GraphQLObjectType = GraphQLObjectType( + name='__Field', + description='Object and Interface types are described by a list of Fields,' + ' each of which has a name, potentially a list of arguments,' + ' and a return type.', + fields=lambda: { + 'name': GraphQLField( + GraphQLNonNull(GraphQLString), + resolve=lambda item, _info: item[0]), + 'description': GraphQLField( + GraphQLString, + resolve=lambda item, _info: item[1].description), + 'args': GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(__InputValue))), + resolve=lambda item, _info: (item[1].args or {}).items()), + 'type': GraphQLField( + GraphQLNonNull(__Type), + resolve=lambda item, _info: item[1].type), + 
'isDeprecated': GraphQLField( + GraphQLNonNull(GraphQLBoolean), + resolve=lambda item, _info: item[1].is_deprecated), + 'deprecationReason': GraphQLField( + GraphQLString, + resolve=lambda item, _info: item[1].deprecation_reason)}) + + +__InputValue: GraphQLObjectType = GraphQLObjectType( + name='__InputValue', + description='Arguments provided to Fields or Directives and the input' + ' fields of an InputObject are represented as Input Values' + ' which describe their type and optionally a default value.', + fields=lambda: { + 'name': GraphQLField( + GraphQLNonNull(GraphQLString), + resolve=lambda item, _info: item[0]), + 'description': GraphQLField( + GraphQLString, + resolve=lambda item, _info: item[1].description), + 'type': GraphQLField( + GraphQLNonNull(__Type), + resolve=lambda item, _info: item[1].type), + 'defaultValue': GraphQLField( + GraphQLString, + description='A GraphQL-formatted string representing' + ' the default value for this input value.', + resolve=lambda item, _info: + None if is_invalid(item[1].default_value) else print_value( + item[1].default_value, item[1].type))}) + + +__EnumValue: GraphQLObjectType = GraphQLObjectType( + name='__EnumValue', + description='One possible value for a given Enum. Enum values are unique' + ' values, not a placeholder for a string or numeric value.' + ' However an Enum value is returned in a JSON response as a' + ' string.', + fields=lambda: { + 'name': GraphQLField( + GraphQLNonNull(GraphQLString), + resolve=lambda item, _info: item[0]), + 'description': GraphQLField( + GraphQLString, + resolve=lambda item, _info: item[1].description), + 'isDeprecated': GraphQLField( + GraphQLNonNull(GraphQLBoolean), + resolve=lambda item, _info: item[1].is_deprecated), + 'deprecationReason': GraphQLField( + GraphQLString, + resolve=lambda item, _info: item[1].deprecation_reason)}) + + +class TypeKind(Enum): + SCALAR = 'scalar' + OBJECT = 'object' + INTERFACE = 'interface' + UNION = 'union' + ENUM = 'enum' + INPUT_OBJECT = 'input object' + LIST = 'list' + NON_NULL = 'non-null' + + +__TypeKind: GraphQLEnumType = GraphQLEnumType( + name='__TypeKind', + description='An enum describing what kind of type a given `__Type` is.', + values={ + 'SCALAR': GraphQLEnumValue( + TypeKind.SCALAR, + description='Indicates this type is a scalar.'), + 'OBJECT': GraphQLEnumValue( + TypeKind.OBJECT, + description='Indicates this type is an object. ' + '`fields` and `interfaces` are valid fields.'), + 'INTERFACE': GraphQLEnumValue( + TypeKind.INTERFACE, + description='Indicates this type is an interface. ' + '`fields` and `possibleTypes` are valid fields.'), + 'UNION': GraphQLEnumValue( + TypeKind.UNION, + description='Indicates this type is a union. ' + '`possibleTypes` is a valid field.'), + 'ENUM': GraphQLEnumValue( + TypeKind.ENUM, + description='Indicates this type is an enum. ' + '`enumValues` is a valid field.'), + 'INPUT_OBJECT': GraphQLEnumValue( + TypeKind.INPUT_OBJECT, + description='Indicates this type is an input object. ' + '`inputFields` is a valid field.'), + 'LIST': GraphQLEnumValue( + TypeKind.LIST, + description='Indicates this type is a list. ' + '`ofType` is a valid field.'), + 'NON_NULL': GraphQLEnumValue( + TypeKind.NON_NULL, + description='Indicates this type is a non-null. 
' + '`ofType` is a valid field.')}) + + +SchemaMetaFieldDef = GraphQLField( + GraphQLNonNull(__Schema), # name = '__schema' + description='Access the current type schema of this server.', + args={}, + resolve=lambda source, info: info.schema) + + +TypeMetaFieldDef = GraphQLField( + __Type, # name = '__type' + description='Request the type information of a single type.', + args={'name': GraphQLArgument(GraphQLNonNull(GraphQLString))}, + resolve=lambda source, info, **args: info.schema.get_type(args['name'])) + + +TypeNameMetaFieldDef = GraphQLField( + GraphQLNonNull(GraphQLString), # name='__typename' + description='The name of the current Object type at runtime.', + args={}, + resolve=lambda source, info, **args: info.parent_type.name) + + +# Since double underscore names are subject to name mangling in Python, +# the introspection classes are best imported via this dictionary: +introspection_types = { + '__Schema': __Schema, + '__Directive': __Directive, + '__DirectiveLocation': __DirectiveLocation, + '__Type': __Type, + '__Field': __Field, + '__InputValue': __InputValue, + '__EnumValue': __EnumValue, + '__TypeKind': __TypeKind} + + +def is_introspection_type(type_: Any) -> bool: + return is_named_type(type_) and type_.name in introspection_types diff --git a/graphql/type/scalars.py b/graphql/type/scalars.py new file mode 100644 index 00000000..4fad4c4e --- /dev/null +++ b/graphql/type/scalars.py @@ -0,0 +1,233 @@ +from math import isfinite +from typing import Any + +from ..error import INVALID +from ..pyutils import is_finite, is_integer +from ..language.ast import ( + BooleanValueNode, FloatValueNode, IntValueNode, StringValueNode) +from .definition import GraphQLScalarType, is_named_type + +__all__ = [ + 'is_specified_scalar_type', 'specified_scalar_types', + 'GraphQLInt', 'GraphQLFloat', 'GraphQLString', + 'GraphQLBoolean', 'GraphQLID'] + + +# As per the GraphQL Spec, Integers are only treated as valid when a valid +# 32-bit signed integer, providing the broadest support across platforms. +# +# n.b. JavaScript's integers are safe between -(2^53 - 1) and 2^53 - 1 because +# they are internally represented as IEEE 754 doubles, +# while Python's integers may be arbitrarily large. 
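+#
+# As an illustrative sketch (assuming this module is importable as
+# ``graphql.type.scalars``), the 32-bit boundary behaves as follows:
+#
+#     >>> from graphql.type.scalars import GraphQLInt
+#     >>> GraphQLInt.serialize(2 ** 31 - 1)
+#     2147483647
+#     >>> GraphQLInt.serialize(2 ** 31)
+#     Traceback (most recent call last):
+#         ...
+#     TypeError: Int cannot represent non 32-bit signed integer value: 2147483648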
+MAX_INT = 2147483647 +MIN_INT = -2147483648 + + +def serialize_int(value: Any) -> int: + if isinstance(value, bool): + return 1 if value else 0 + try: + if isinstance(value, int): + num = value + elif isinstance(value, float): + num = int(value) + if num != value: + raise ValueError + elif not value and isinstance(value, str): + value = '' + raise ValueError + else: + num = int(value) + float_value = float(value) + if num != float_value: + raise ValueError + except (OverflowError, ValueError, TypeError): + raise TypeError(f'Int cannot represent non-integer value: {value!r}') + if not MIN_INT <= num <= MAX_INT: + raise TypeError( + f'Int cannot represent non 32-bit signed integer value: {value!r}') + return num + + +def coerce_int(value: Any) -> int: + if not is_integer(value): + raise TypeError(f'Int cannot represent non-integer value: {value!r}') + if not MIN_INT <= value <= MAX_INT: + raise TypeError( + f'Int cannot represent non 32-bit signed integer value: {value!r}') + return int(value) + + +def parse_int_literal(ast, _variables=None): + """Parse an integer value node in the AST.""" + if isinstance(ast, IntValueNode): + num = int(ast.value) + if MIN_INT <= num <= MAX_INT: + return num + return INVALID + + +GraphQLInt = GraphQLScalarType( + name='Int', + description='The `Int` scalar type represents' + ' non-fractional signed whole numeric values.' + ' Int can represent values between -(2^31) and 2^31 - 1. ', + serialize=serialize_int, + parse_value=coerce_int, + parse_literal=parse_int_literal) + + +def serialize_float(value: Any) -> float: + if isinstance(value, bool): + return 1 if value else 0 + try: + if not value and isinstance(value, str): + value = '' + raise ValueError + num = value if isinstance(value, float) else float(value) + if not isfinite(num): + raise ValueError + except (ValueError, TypeError): + raise TypeError(f'Float cannot represent non numeric value: {value!r}') + return num + + +def coerce_float(value: Any) -> float: + if not is_finite(value): + raise TypeError(f'Float cannot represent non numeric value: {value!r}') + return float(value) + + +def parse_float_literal(ast, _variables=None): + """Parse a float value node in the AST.""" + if isinstance(ast, (FloatValueNode, IntValueNode)): + return float(ast.value) + return INVALID + + +GraphQLFloat = GraphQLScalarType( + name='Float', + description='The `Float` scalar type represents' + ' signed double-precision fractional values' + ' as specified by [IEEE 754]' + '(http://en.wikipedia.org/wiki/IEEE_floating_point).', + serialize=serialize_float, + parse_value=coerce_float, + parse_literal=parse_float_literal) + + +def serialize_string(value: Any) -> str: + if isinstance(value, str): + return value + if isinstance(value, bool): + return 'true' if value else 'false' + if is_finite(value): + return str(value) + # do not serialize builtin types as strings, + # but allow serialization of custom types via their __str__ method + if type(value).__module__ == 'builtins': + raise TypeError(f'String cannot represent value: {value!r}') + return str(value) + + +def coerce_string(value: Any) -> str: + if not isinstance(value, str): + raise TypeError( + f'String cannot represent a non string value: {value!r}') + return value + + +def parse_string_literal(ast, _variables=None): + """Parse a string value node in the AST.""" + if isinstance(ast, StringValueNode): + return ast.value + return INVALID + + +GraphQLString = GraphQLScalarType( + name='String', + description='The `String` scalar type represents textual data,' + ' 
represented as UTF-8 character sequences.' + ' The String type is most often used by GraphQL' + ' to represent free-form human-readable text.', + serialize=serialize_string, + parse_value=coerce_string, + parse_literal=parse_string_literal) + + +def serialize_boolean(value: Any) -> bool: + if isinstance(value, bool): + return value + if is_finite(value): + return bool(value) + raise TypeError(f'Boolean cannot represent a non boolean value: {value!r}') + + +def coerce_boolean(value: Any) -> bool: + if not isinstance(value, bool): + raise TypeError( + f'Boolean cannot represent a non boolean value: {value!r}') + return value + + +def parse_boolean_literal(ast, _variables=None): + """Parse a boolean value node in the AST.""" + if isinstance(ast, BooleanValueNode): + return ast.value + return INVALID + + +GraphQLBoolean = GraphQLScalarType( + name='Boolean', + description='The `Boolean` scalar type represents `true` or `false`.', + serialize=serialize_boolean, + parse_value=coerce_boolean, + parse_literal=parse_boolean_literal) + + +def serialize_id(value: Any) -> str: + if isinstance(value, str): + return value + if is_integer(value): + return str(int(value)) + # do not serialize builtin types as IDs, + # but allow serialization of custom types via their __str__ method + if type(value).__module__ == 'builtins': + raise TypeError(f'ID cannot represent value: {value!r}') + return str(value) + + +def coerce_id(value: Any) -> str: + if not isinstance(value, str) and not is_integer(value): + raise TypeError(f'ID cannot represent value: {value!r}') + if isinstance(value, float): + value = int(value) + return str(value) + + +def parse_id_literal(ast, _variables=None): + """Parse an ID value node in the AST.""" + if isinstance(ast, (StringValueNode, IntValueNode)): + return ast.value + return INVALID + + +GraphQLID = GraphQLScalarType( + name='ID', + description='The `ID` scalar type represents a unique identifier,' + ' often used to refetch an object or as key for a cache.' + ' The ID type appears in a JSON response as a String; however,' + ' it is not intended to be human-readable. 
When expected as an' + ' input type, any string (such as `"4"`) or integer (such as' + ' `4`) input value will be accepted as an ID.', + serialize=serialize_id, + parse_value=coerce_id, + parse_literal=parse_id_literal) + + +specified_scalar_types = {type_.name: type_ for type_ in ( + GraphQLString, GraphQLInt, GraphQLFloat, GraphQLBoolean, GraphQLID)} + + +def is_specified_scalar_type(type_: Any) -> bool: + return is_named_type(type_) and type_.name in specified_scalar_types diff --git a/graphql/type/schema.py b/graphql/type/schema.py new file mode 100644 index 00000000..02b1fb3d --- /dev/null +++ b/graphql/type/schema.py @@ -0,0 +1,226 @@ +from functools import partial, reduce +from typing import ( + Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, cast) + +from ..error import GraphQLError +from ..language import ast +from .definition import ( + GraphQLAbstractType, GraphQLInterfaceType, GraphQLNamedType, + GraphQLObjectType, GraphQLUnionType, GraphQLInputObjectType, + GraphQLWrappingType, + is_abstract_type, is_input_object_type, is_interface_type, + is_object_type, is_union_type, is_wrapping_type) +from .directives import GraphQLDirective, specified_directives, is_directive +from .introspection import introspection_types + +__all__ = ['GraphQLSchema', 'is_schema'] + + +TypeMap = Dict[str, GraphQLNamedType] + + +def is_schema(schema: Any) -> bool: + """Test if the given value is a GraphQL schema.""" + return isinstance(schema, GraphQLSchema) + + +class GraphQLSchema: + """Schema Definition + + A Schema is created by supplying the root types of each type of operation, + query and mutation (optional). A schema definition is then supplied to the + validator and executor. + + Example:: + + const MyAppSchema = GraphQLSchema( + query=MyAppQueryRootType, + mutation=MyAppMutationRootType) + + Note: If a list of `directives` are provided to GraphQLSchema, that will be + the exact list of directives represented and allowed. If `directives` is + not provided, then a default set of the specified directives (e.g. @include + and @skip) will be used. If you wish to provide *additional* directives to + these specified directives, you must explicitly declare them. Example:: + + const MyAppSchema = GraphQLSchema( + ... + directives=specifiedDirectives + [myCustomDirective]) + """ + + query: Optional[GraphQLObjectType] + mutation: Optional[GraphQLObjectType] + subscription: Optional[GraphQLObjectType] + type_map: TypeMap + directives: List[GraphQLDirective] + ast_node: Optional[ast.SchemaDefinitionNode] + extension_ast_nodes: Optional[Tuple[ast.SchemaExtensionNode]] + + def __init__(self, + query: GraphQLObjectType=None, + mutation: GraphQLObjectType=None, + subscription: GraphQLObjectType=None, + types: Sequence[GraphQLNamedType]=None, + directives: Sequence[GraphQLDirective]=None, + ast_node: ast.SchemaDefinitionNode=None, + extension_ast_nodes: Sequence[ast.SchemaExtensionNode]=None, + assume_valid: bool=False) -> None: + """Initialize GraphQL schema. + + If this schema was built from a source known to be valid, then it may + be marked with assume_valid to avoid an additional type system + validation. Otherwise check for common mistakes during construction + to produce clear and early error messages. + """ + if assume_valid: + # If this schema was built from a source known to be valid, + # then it may be marked with assume_valid to avoid an additional + # type system validation. 
+ self._validation_errors: Optional[List[GraphQLError]] = [] + else: + # Otherwise check for common mistakes during construction to + # produce clear and early error messages. + if types is None: + types = [] + elif isinstance(types, tuple): + types = list(types) + if not isinstance(types, list): + raise TypeError('Schema types must be a list/tuple.') + if isinstance(directives, tuple): + directives = list(directives) + if directives is not None and not isinstance(directives, list): + raise TypeError('Schema directives must be a list/tuple.') + self._validation_errors = None + + self.query_type = query + self.mutation_type = mutation + self.subscription_type = subscription + # Provide specified directives (e.g. @include and @skip) by default + self.directives = list(directives or specified_directives) + self.ast_node = ast_node + self.extension_ast_nodes = cast( + Tuple[ast.SchemaExtensionNode], tuple(extension_ast_nodes) + ) if extension_ast_nodes else None + + # Build type map now to detect any errors within this schema. + initial_types = [query, mutation, subscription, + introspection_types['__Schema']] + if types: + initial_types.extend(types) + + # Keep track of all types referenced within the schema. + type_map: TypeMap = {} + # First by deeply visiting all initial types. + type_map = type_map_reduce(initial_types, type_map) + # Then by deeply visiting all directive types. + type_map = type_map_directive_reduce(self.directives, type_map) + # Storing the resulting map for reference by the schema + self.type_map = type_map + + self._possible_type_map: Dict[str, Set[str]] = {} + + # Keep track of all implementations by interface name. + self._implementations: Dict[str, List[GraphQLObjectType]] = {} + setdefault = self._implementations.setdefault + for type_ in self.type_map.values(): + if is_object_type(type_): + type_ = cast(GraphQLObjectType, type_) + for interface in type_.interfaces: + if is_interface_type(interface): + setdefault(interface.name, []).append(type_) + elif is_abstract_type(type_): + setdefault(type_.name, []) + + def get_type(self, name: str) -> Optional[GraphQLNamedType]: + return self.type_map.get(name) + + def get_possible_types( + self, abstract_type: GraphQLAbstractType + ) -> Sequence[GraphQLObjectType]: + """Get list of all possible concrete types for given abstract type.""" + if is_union_type(abstract_type): + abstract_type = cast(GraphQLUnionType, abstract_type) + return abstract_type.types + return self._implementations[abstract_type.name] + + def is_possible_type( + self, abstract_type: GraphQLAbstractType, + possible_type: GraphQLObjectType) -> bool: + """Check whether a concrete type is possible for an abstract type.""" + possible_type_map = self._possible_type_map + try: + possible_type_names = possible_type_map[abstract_type.name] + except KeyError: + possible_types = self.get_possible_types(abstract_type) + possible_type_names = {type_.name for type_ in possible_types} + possible_type_map[abstract_type.name] = possible_type_names + return possible_type.name in possible_type_names + + def get_directive(self, name: str) -> Optional[GraphQLDirective]: + for directive in self.directives: + if directive.name == name: + return directive + return None + + @property + def validation_errors(self): + return self._validation_errors + + +def type_map_reducer(map_: TypeMap, type_: GraphQLNamedType=None) -> TypeMap: + """Reducer function for creating the type map from given types.""" + if not type_: + return map_ + if is_wrapping_type(type_): + return 
type_map_reducer( + map_, cast(GraphQLWrappingType[GraphQLNamedType], type_).of_type) + name = type_.name + if name in map_: + if map_[name] is not type_: + raise TypeError( + 'Schema must contain unique named types but contains multiple' + f' types named {name!r}.') + return map_ + map_[name] = type_ + + if is_union_type(type_): + type_ = cast(GraphQLUnionType, type_) + map_ = type_map_reduce(type_.types, map_) + + if is_object_type(type_): + type_ = cast(GraphQLObjectType, type_) + map_ = type_map_reduce(type_.interfaces, map_) + + if is_object_type(type_) or is_interface_type(type_): + for field in cast(GraphQLInterfaceType, type_).fields.values(): + args = field.args + if args: + types = [arg.type for arg in args.values()] + map_ = type_map_reduce(types, map_) + map_ = type_map_reducer(map_, field.type) + + if is_input_object_type(type_): + for field in cast(GraphQLInputObjectType, type_).fields.values(): + map_ = type_map_reducer(map_, field.type) + + return map_ + + +def type_map_directive_reducer( + map_: TypeMap, directive: GraphQLDirective=None) -> TypeMap: + """Reducer function for creating the type map from given directives.""" + # Directives are not validated until validate_schema() is called. + if not is_directive(directive): + return map_ + return reduce(lambda prev_map, arg: + type_map_reducer(prev_map, arg.type), # type: ignore + directive.args.values(), map_) # type: ignore + + +# Reduce functions for type maps: +type_map_reduce: Callable[ # type: ignore + [Sequence[Optional[GraphQLNamedType]], TypeMap], TypeMap] = partial( + reduce, type_map_reducer) +type_map_directive_reduce: Callable[ # type: ignore + [Sequence[Optional[GraphQLDirective]], TypeMap], TypeMap] = partial( + reduce, type_map_directive_reducer) diff --git a/graphql/type/validate.py b/graphql/type/validate.py new file mode 100644 index 00000000..13353044 --- /dev/null +++ b/graphql/type/validate.py @@ -0,0 +1,546 @@ +from operator import attrgetter +from typing import Any, Callable, List, Optional, Sequence, Set, Union, cast + +from ..error import GraphQLError +from ..language import ( + EnumValueDefinitionNode, FieldDefinitionNode, InputValueDefinitionNode, + NamedTypeNode, Node, OperationType, OperationTypeDefinitionNode, TypeNode) +from .definition import ( + GraphQLEnumType, GraphQLInputObjectType, GraphQLInterfaceType, + GraphQLObjectType, GraphQLUnionType, + is_enum_type, is_input_object_type, is_input_type, is_interface_type, + is_named_type, is_non_null_type, + is_object_type, is_output_type, is_union_type) +from ..utilities.assert_valid_name import is_valid_name_error +from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of +from .directives import GraphQLDirective, is_directive +from .introspection import is_introspection_type +from .schema import GraphQLSchema, is_schema + +__all__ = ['validate_schema', 'assert_valid_schema'] + + +def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: + """Validate a GraphQL schema. + + Implements the "Type Validation" sub-sections of the specification's + "Type System" section. + + Validation runs synchronously, returning a list of encountered errors, or + an empty list if no errors were encountered and the Schema is valid. + """ + # First check to ensure the provided value is in fact a GraphQLSchema. + if not is_schema(schema): + raise TypeError(f'Expected {schema!r} to be a GraphQL schema.') + + # If this Schema has already been validated, return the previous results. 
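+    #
+    # A minimal usage sketch (assuming these names are re-exported by
+    # ``graphql.type``; otherwise they can be imported from their modules):
+    #
+    #     >>> from graphql.type import (
+    #     ...     GraphQLField, GraphQLObjectType, GraphQLSchema,
+    #     ...     GraphQLString, validate_schema)
+    #     >>> query = GraphQLObjectType(
+    #     ...     'Query', {'hello': GraphQLField(GraphQLString)})
+    #     >>> validate_schema(GraphQLSchema(query=query))
+    #     []
+    #     >>> [e.message for e in validate_schema(GraphQLSchema())]
+    #     ['Query root type must be provided.']
+    #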
+ # noinspection PyProtectedMember + errors = schema._validation_errors + if errors is None: + + # Validate the schema, producing a list of errors. + context = SchemaValidationContext(schema) + context.validate_root_types() + context.validate_directives() + context.validate_types() + + # Persist the results of validation before returning to ensure + # validation does not run multiple times for this schema. + errors = context.errors + schema._validation_errors = errors + + return errors + + +def assert_valid_schema(schema: GraphQLSchema): + """Utility function which asserts a schema is valid. + + Throws a TypeError if the schema is invalid. + """ + errors = validate_schema(schema) + if errors: + raise TypeError('\n\n'.join(error.message for error in errors)) + + +class SchemaValidationContext: + """Utility class providing a context for schema validation.""" + + errors: List[GraphQLError] + schema: GraphQLSchema + + def __init__(self, schema: GraphQLSchema) -> None: + self.errors = [] + self.schema = schema + + def report_error(self, message: str, nodes: Union[ + Optional[Node], Sequence[Optional[Node]]]=None): + if isinstance(nodes, Node): + nodes = [nodes] + if nodes: + nodes = [node for node in nodes if node] + nodes = cast(Optional[Sequence[Node]], nodes) + self.add_error(GraphQLError(message, nodes)) + + def add_error(self, error: GraphQLError): + self.errors.append(error) + + def validate_root_types(self): + schema = self.schema + + query_type = schema.query_type + if not query_type: + self.report_error( + 'Query root type must be provided.', schema.ast_node) + elif not is_object_type(query_type): + self.report_error( + 'Query root type must be Object type,' + f' it cannot be {query_type}.', + get_operation_type_node( + schema, query_type, OperationType.QUERY)) + + mutation_type = schema.mutation_type + if mutation_type and not is_object_type(mutation_type): + self.report_error( + 'Mutation root type must be Object type if provided,' + f' it cannot be {mutation_type}.', + get_operation_type_node( + schema, mutation_type, OperationType.MUTATION)) + + subscription_type = schema.subscription_type + if subscription_type and not is_object_type(subscription_type): + self.report_error( + 'Subscription root type must be Object type if provided,' + f' it cannot be {subscription_type}.', + get_operation_type_node( + schema, subscription_type, OperationType.SUBSCRIPTION)) + + def validate_directives(self): + directives = self.schema.directives + for directive in directives: + # Ensure all directives are in fact GraphQL directives. + if not is_directive(directive): + self.report_error( + f'Expected directive but got: {directive!r}.', + getattr(directive, 'ast_node', None)) + continue + + # Ensure they are named correctly. + self.validate_name(directive) + + # Ensure the arguments are valid. + arg_names = set() + for arg_name, arg in directive.args.items(): + # Ensure they are named correctly. + self.validate_name(arg_name, arg) + + # Ensure they are unique per directive. + if arg_name in arg_names: + self.report_error( + f'Argument @{directive.name}({arg_name}:)' + ' can only be defined once.', + get_all_directive_arg_nodes(directive, arg_name)) + continue + arg_names.add(arg_name) + + # Ensure the type is an input type. 
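+                # For illustration, a custom directive whose argument passes
+                # this check could look like the following sketch (the
+                # ``cache`` directive and its ``maxAge`` argument are made-up
+                # names; the constructors are those defined in graphql.type):
+                #
+                #     GraphQLCacheDirective = GraphQLDirective(
+                #         name='cache',
+                #         locations=[DirectiveLocation.FIELD],
+                #         args={'maxAge': GraphQLArgument(GraphQLInt)})
+                #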
+ if not is_input_type(arg.type): + self.report_error( + f'The type of @{directive.name}({arg_name}:)' + f' must be Input Type but got: {arg.type!r}.', + get_directive_arg_type_node(directive, arg_name)) + + def validate_name(self, node: Any, name: str=None): + # Ensure names are valid, however introspection types opt out. + try: + if not name: + name = node.name + name = cast(str, name) + ast_node = node.ast_node + except AttributeError: + pass + else: + error = is_valid_name_error(name, ast_node) + if error: + self.add_error(error) + + def validate_types(self): + for type_ in self.schema.type_map.values(): + + # Ensure all provided types are in fact GraphQL type. + if not is_named_type(type_): + self.report_error( + f'Expected GraphQL named type but got: {type_!r}.', + type_.ast_node if type_ else None) + continue + + # Ensure it is named correctly (excluding introspection types). + if not is_introspection_type(type_): + self.validate_name(type_) + + if is_object_type(type_): + type_ = cast(GraphQLObjectType, type_) + # Ensure fields are valid + self.validate_fields(type_) + + # Ensure objects implement the interfaces they claim to. + self.validate_object_interfaces(type_) + elif is_interface_type(type_): + type_ = cast(GraphQLInterfaceType, type_) + # Ensure fields are valid. + self.validate_fields(type_) + elif is_union_type(type_): + type_ = cast(GraphQLUnionType, type_) + # Ensure Unions include valid member types. + self.validate_union_members(type_) + elif is_enum_type(type_): + type_ = cast(GraphQLEnumType, type_) + # Ensure Enums have valid values. + self.validate_enum_values(type_) + elif is_input_object_type(type_): + type_ = cast(GraphQLInputObjectType, type_) + # Ensure Input Object fields are valid. + self.validate_input_fields(type_) + + def validate_fields( + self, type_: Union[GraphQLObjectType, GraphQLInterfaceType]): + fields = type_.fields + + # Objects and Interfaces both must define one or more fields. + if not fields: + self.report_error( + f'Type {type_.name} must define one or more fields.', + get_all_nodes(type_)) + + for field_name, field in fields.items(): + + # Ensure they are named correctly. + self.validate_name(field, field_name) + + # Ensure they were defined at most once. + field_nodes = get_all_field_nodes(type_, field_name) + if len(field_nodes) > 1: + self.report_error( + f'Field {type_.name}.{field_name}' + ' can only be defined once.', field_nodes) + continue + + # Ensure the type is an output type + if not is_output_type(field.type): + self.report_error( + f'The type of {type_.name}.{field_name}' + ' must be Output Type but got: {field.type!r}.', + get_field_type_node(type_, field_name)) + + # Ensure the arguments are valid. + arg_names: Set[str] = set() + for arg_name, arg in field.args.items(): + # Ensure they are named correctly. + self.validate_name(arg, arg_name) + + # Ensure they are unique per field. + if arg_name in arg_names: + self.report_error( + 'Field argument' + f' {type_.name}.{field_name}({arg_name}:)' + ' can only be defined once.', + get_all_field_arg_nodes(type_, field_name, arg_name)) + break + arg_names.add(arg_name) + + # Ensure the type is an input type. 
+ if not is_input_type(arg.type): + self.report_error( + 'Field argument' + f' {type_.name}.{field_name}({arg_name}:)' + f' must be Input Type but got: {arg.type!r}.', + get_field_arg_type_node(type_, field_name, arg_name)) + + def validate_object_interfaces(self, obj: GraphQLObjectType): + implemented_type_names: Set[str] = set() + for iface in obj.interfaces: + if not is_interface_type(iface): + self.report_error( + f'Type {obj.name} must only implement Interface' + f' types, it cannot implement {iface!r}.', + get_implements_interface_node(obj, iface)) + continue + if iface.name in implemented_type_names: + self.report_error( + f'Type {obj.name} can only implement {iface.name} once.', + get_all_implements_interface_nodes(obj, iface)) + continue + implemented_type_names.add(iface.name) + self.validate_object_implements_interface(obj, iface) + + def validate_object_implements_interface( + self, obj: GraphQLObjectType, iface: GraphQLInterfaceType): + obj_fields, iface_fields = obj.fields, iface.fields + + # Assert each interface field is implemented. + for field_name, iface_field in iface_fields.items(): + obj_field = obj_fields.get(field_name) + + # Assert interface field exists on object. + if not obj_field: + self.report_error( + f'Interface field {iface.name}.{field_name}' + f' expected but {obj.name} does not provide it.', + [get_field_node(iface, field_name)] + + cast(List[Optional[FieldDefinitionNode]], + get_all_nodes(obj))) + continue + + # Assert interface field type is satisfied by object field type, + # by being a valid subtype. (covariant) + if not is_type_sub_type_of( + self.schema, obj_field.type, iface_field.type): + self.report_error( + f'Interface field {iface.name}.{field_name}' + f' expects type {iface_field.type}' + f' but {obj.name}.{field_name}' + f' is type {obj_field.type}.', + [get_field_type_node(iface, field_name), + get_field_type_node(obj, field_name)]) + + # Assert each interface field arg is implemented. + for arg_name, iface_arg in iface_field.args.items(): + obj_arg = obj_field.args.get(arg_name) + + # Assert interface field arg exists on object field. + if not obj_arg: + self.report_error( + 'Interface field argument' + f' {iface.name}.{field_name}({arg_name}:)' + f' expected but {obj.name}.{field_name}' + ' does not provide it.', + [get_field_arg_node(iface, field_name, arg_name), + get_field_node(obj, field_name)]) + continue + + # Assert interface field arg type matches object field arg type + # (invariant). + if not is_equal_type(iface_arg.type, obj_arg.type): + self.report_error( + 'Interface field argument' + f' {iface.name}.{field_name}({arg_name}:)' + f' expects type {iface_arg.type}' + f' but {obj.name}.{field_name}({arg_name}:)' + f' is type {obj_arg.type}.', + [get_field_arg_type_node(iface, field_name, arg_name), + get_field_arg_type_node(obj, field_name, arg_name)]) + + # Assert additional arguments must not be required. 
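+            # Illustrative sketch (assuming ``build_schema`` and
+            # ``validate_schema`` are re-exported by the top-level package):
+            #
+            #     schema = build_schema('''
+            #         type Query { node: Node }
+            #         interface Node { id: ID }
+            #         type Foo implements Node { id(x: Int!): ID }
+            #     ''')
+            #     validate_schema(schema)  # flags the required ``x`` argument
+            #                              # that ``Node.id`` does not declare
+            #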
+ for arg_name, obj_arg in obj_field.args.items(): + iface_arg = iface_field.args.get(arg_name) + if not iface_arg and is_non_null_type(obj_arg.type): + self.report_error( + 'Object field argument' + f' {obj.name}.{field_name}({arg_name}:)' + f' is of required type {obj_arg.type}' + ' but is not also provided by the Interface field' + f' {iface.name}.{field_name}.', + [get_field_arg_type_node(obj, field_name, arg_name), + get_field_node(iface, field_name)]) + + def validate_union_members(self, union: GraphQLUnionType): + member_types = union.types + + if not member_types: + self.report_error( + f'Union type {union.name}' + ' must define one or more member types.', get_all_nodes(union)) + + included_type_names: Set[str] = set() + for member_type in member_types: + if member_type.name in included_type_names: + self.report_error( + f'Union type {union.name} can only include type' + f' {member_type.name} once.', + get_union_member_type_nodes(union, member_type.name)) + continue + included_type_names.add(member_type.name) + + def validate_enum_values(self, enum_type: GraphQLEnumType): + enum_values = enum_type.values + + if not enum_values: + self.report_error( + f'Enum type {enum_type.name} must define one or more values.', + get_all_nodes(enum_type)) + + for value_name, enum_value in enum_values.items(): + # Ensure no duplicates. + all_nodes = get_enum_value_nodes(enum_type, value_name) + if all_nodes and len(all_nodes) > 1: + self.report_error( + f'Enum type {enum_type.name}' + f' can include value {value_name} only once.', all_nodes) + + # Ensure valid name. + self.validate_name(enum_value, value_name) + if value_name in ('true', 'false', 'null'): + self.report_error( + f'Enum type {enum_type.name} cannot include value:' + f' {value_name}.', enum_value.ast_node) + + def validate_input_fields(self, input_obj: GraphQLInputObjectType): + fields = input_obj.fields + + if not fields: + self.report_error( + f'Input Object type {input_obj.name}' + ' must define one or more fields.', get_all_nodes(input_obj)) + + # Ensure the arguments are valid + for field_name, field in fields.items(): + + # Ensure they are named correctly. + self.validate_name(field, field_name) + + # Ensure the type is an input type. 
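+            # (e.g. an input field declared with an object, interface or
+            # union type instead of a scalar, enum or input object type is
+            # reported here)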
+ if not is_input_type(field.type): + self.report_error( + f'The type of {input_obj.name}.{field_name}' + f' must be Input Type but got: {field.type!r}.', + field.ast_node.type if field.ast_node else None) + + +def get_operation_type_node(schema: GraphQLSchema, type_: GraphQLObjectType, + operation: OperationType) -> Optional[Node]: + operation_nodes = cast( + List[OperationTypeDefinitionNode], + get_all_sub_nodes(schema, attrgetter('operation_types'))) + for node in operation_nodes: + if node.operation == operation: + return node.type + return type_.ast_node + + +SDLDefinedObject = Union[ + GraphQLSchema, GraphQLDirective, GraphQLInterfaceType, GraphQLObjectType, + GraphQLInputObjectType, GraphQLUnionType, GraphQLEnumType] + + +def get_all_nodes(obj: SDLDefinedObject) -> List[Node]: + node = obj.ast_node + nodes: List[Node] = [node] if node else [] + extension_nodes = getattr(obj, 'extension_ast_nodes', None) + if extension_nodes: + nodes.extend(extension_nodes) + return nodes + + +def get_all_sub_nodes( + obj: SDLDefinedObject, + getter: Callable[[Node], List[Node]]) -> List[Node]: + result: List[Node] = [] + for ast_node in get_all_nodes(obj): + if ast_node: + sub_nodes = getter(ast_node) + if sub_nodes: + result.extend(sub_nodes) + return result + + +def get_implements_interface_node( + type_: GraphQLObjectType, iface: GraphQLInterfaceType + ) -> Optional[NamedTypeNode]: + nodes = get_all_implements_interface_nodes(type_, iface) + return nodes[0] if nodes else None + + +def get_all_implements_interface_nodes( + type_: GraphQLObjectType, iface: GraphQLInterfaceType + ) -> List[NamedTypeNode]: + implements_nodes = cast( + List[NamedTypeNode], + get_all_sub_nodes(type_, attrgetter('interfaces'))) + return [iface_node for iface_node in implements_nodes + if iface_node.name.value == iface.name] + + +def get_field_node( + type_: Union[GraphQLObjectType, GraphQLInterfaceType], + field_name: str) -> Optional[FieldDefinitionNode]: + nodes = get_all_field_nodes(type_, field_name) + return nodes[0] if nodes else None + + +def get_all_field_nodes( + type_: Union[GraphQLObjectType, GraphQLInterfaceType], + field_name: str) -> List[FieldDefinitionNode]: + field_nodes = cast( + List[FieldDefinitionNode], + get_all_sub_nodes(type_, attrgetter('fields'))) + return [field_node for field_node in field_nodes + if field_node.name.value == field_name] + + +def get_field_type_node( + type_: Union[GraphQLObjectType, GraphQLInterfaceType], + field_name: str) -> Optional[TypeNode]: + field_node = get_field_node(type_, field_name) + return field_node.type if field_node else None + + +def get_field_arg_node( + type_: Union[GraphQLObjectType, GraphQLInterfaceType], + field_name: str, arg_name: str) -> Optional[InputValueDefinitionNode]: + nodes = get_all_field_arg_nodes(type_, field_name, arg_name) + return nodes[0] if nodes else None + + +def get_all_field_arg_nodes( + type_: Union[GraphQLObjectType, GraphQLInterfaceType], + field_name: str, arg_name: str) -> List[InputValueDefinitionNode]: + arg_nodes = [] + field_node = get_field_node(type_, field_name) + if field_node and field_node.arguments: + for node in field_node.arguments: + if node.name.value == arg_name: + arg_nodes.append(node) + return arg_nodes + + +def get_field_arg_type_node( + type_: Union[GraphQLObjectType, GraphQLInterfaceType], + field_name: str, arg_name: str) -> Optional[TypeNode]: + field_arg_node = get_field_arg_node(type_, field_name, arg_name) + return field_arg_node.type if field_arg_node else None + + +def get_all_directive_arg_nodes( 
+ directive: GraphQLDirective, arg_name: str + ) -> List[InputValueDefinitionNode]: + arg_nodes = cast( + List[InputValueDefinitionNode], + get_all_sub_nodes(directive, attrgetter('arguments'))) + return [arg_node for arg_node in arg_nodes + if arg_node.name.value == arg_name] + + +def get_directive_arg_type_node( + directive: GraphQLDirective, arg_name: str) -> Optional[TypeNode]: + arg_nodes = get_all_directive_arg_nodes(directive, arg_name) + arg_node = arg_nodes[0] if arg_nodes else None + return arg_node.type if arg_node else None + + +def get_union_member_type_nodes( + union: GraphQLUnionType, type_name: str + ) -> Optional[List[NamedTypeNode]]: + union_nodes = cast( + List[NamedTypeNode], + get_all_sub_nodes(union, attrgetter('types'))) + return [union_node for union_node in union_nodes + if union_node.name.value == type_name] + + +def get_enum_value_nodes( + enum_type: GraphQLEnumType, value_name: str + ) -> Optional[List[EnumValueDefinitionNode]]: + enum_nodes = cast( + List[EnumValueDefinitionNode], + get_all_sub_nodes(enum_type, attrgetter('values'))) + return [enum_node for enum_node in enum_nodes + if enum_node.name.value == value_name] diff --git a/graphql/utilities/__init__.py b/graphql/utilities/__init__.py new file mode 100644 index 00000000..ccd59f2b --- /dev/null +++ b/graphql/utilities/__init__.py @@ -0,0 +1,91 @@ +"""GraphQL Utilities + +The `graphql.utilities` package contains common useful computations to use +with the GraphQL language and type objects. +""" + +# The GraphQL query recommended for a full schema introspection. +from .introspection_query import get_introspection_query + +# Gets the target Operation from a Document +from .get_operation_ast import get_operation_ast + +# Gets the Type for the target Operation AST. +from .get_operation_root_type import get_operation_root_type + +# Convert a GraphQLSchema to an IntrospectionQuery +from .introspection_from_schema import introspection_from_schema + +# Build a GraphQLSchema from an introspection result. +from .build_client_schema import build_client_schema + +# Build a GraphQLSchema from GraphQL Schema language. +from .build_ast_schema import build_ast_schema, build_schema, get_description + +# Extends an existing GraphQLSchema from a parsed GraphQL Schema language AST. +from .extend_schema import extend_schema + +# Sort a GraphQLSchema. +from .lexicographic_sort_schema import lexicographic_sort_schema + +# Print a GraphQLSchema to GraphQL Schema language. +from .schema_printer import ( + print_introspection_schema, print_schema, print_type, print_value) + +# Create a GraphQLType from a GraphQL language AST. +from .type_from_ast import type_from_ast + +# Create a Python value from a GraphQL language AST with a type. +from .value_from_ast import value_from_ast + +# Create a Python value from a GraphQL language AST without a type. +from .value_from_ast_untyped import value_from_ast_untyped + +# Create a GraphQL language AST from a Python value. +from .ast_from_value import ast_from_value + +# A helper to use within recursive-descent visitors which need to be aware of +# the GraphQL type system +from .type_info import TypeInfo + +# Coerces a Python value to a GraphQL type, or produces errors. +from .coerce_value import coerce_value + +# Concatenates multiple AST together. +from .concat_ast import concat_ast + +# Separates an AST into an AST per Operation. 
+from .separate_operations import separate_operations + +# Comparators for types +from .type_comparators import ( + is_equal_type, is_type_sub_type_of, do_types_overlap) + +# Asserts that a string is a valid GraphQL name +from .assert_valid_name import assert_valid_name, is_valid_name_error + +# Compares two GraphQLSchemas and detects breaking changes. +from .find_breaking_changes import ( + BreakingChange, BreakingChangeType, DangerousChange, DangerousChangeType, + find_breaking_changes, find_dangerous_changes) + +# Report all deprecated usage within a GraphQL document. +from .find_deprecated_usages import find_deprecated_usages + +__all__ = [ + 'BreakingChange', 'BreakingChangeType', + 'DangerousChange', 'DangerousChangeType', 'TypeInfo', + 'assert_valid_name', 'ast_from_value', + 'build_ast_schema', 'build_client_schema', 'build_schema', + 'coerce_value', 'concat_ast', + 'do_types_overlap', 'extend_schema', + 'find_breaking_changes', 'find_dangerous_changes', + 'find_deprecated_usages', + 'get_description', 'get_introspection_query', + 'get_operation_ast', 'get_operation_root_type', + 'is_equal_type', 'is_type_sub_type_of', 'is_valid_name_error', + 'introspection_from_schema', + 'lexicographic_sort_schema', + 'print_introspection_schema', 'print_schema', 'print_type', 'print_value', + 'separate_operations', + 'type_from_ast', 'value_from_ast', 'value_from_ast_untyped'] diff --git a/graphql/utilities/assert_valid_name.py b/graphql/utilities/assert_valid_name.py new file mode 100644 index 00000000..dcc196d6 --- /dev/null +++ b/graphql/utilities/assert_valid_name.py @@ -0,0 +1,34 @@ +import re +from typing import Optional + +from ..language import Node +from ..error import GraphQLError + +__all__ = ['assert_valid_name', 'is_valid_name_error'] + + +re_name = re.compile('^[_a-zA-Z][_a-zA-Z0-9]*$') + + +def assert_valid_name(name: str) -> str: + """Uphold the spec rules about naming.""" + error = is_valid_name_error(name) + if error: + raise error + return name + + +def is_valid_name_error( + name: str, node: Node=None) -> Optional[GraphQLError]: + """Return an Error if a name is invalid.""" + if not isinstance(name, str): + raise TypeError('Expected string') + if name.startswith('__'): + return GraphQLError( + f"Name {name!r} must not begin with '__'," + ' which is reserved by GraphQL introspection.', node) + if not re_name.match(name): + return GraphQLError( + 'Names must match /^[_a-zA-Z][_a-zA-Z0-9]*$/' + f' but {name!r} does not.', node) + return None diff --git a/graphql/utilities/ast_from_value.py b/graphql/utilities/ast_from_value.py new file mode 100644 index 00000000..962ab12f --- /dev/null +++ b/graphql/utilities/ast_from_value.py @@ -0,0 +1,110 @@ +import re +from typing import Any, Iterable, List, Mapping, Optional, cast + +from ..language import ( + BooleanValueNode, EnumValueNode, FloatValueNode, IntValueNode, + ListValueNode, NameNode, NullValueNode, ObjectFieldNode, + ObjectValueNode, StringValueNode, ValueNode) +from ..pyutils import is_nullish, is_invalid +from ..type import ( + GraphQLID, GraphQLInputType, GraphQLInputObjectType, + GraphQLList, GraphQLNonNull, + is_enum_type, is_input_object_type, is_list_type, + is_non_null_type, is_scalar_type) + +__all__ = ['ast_from_value'] + +_re_integer_string = re.compile('^-?(0|[1-9][0-9]*)$') + + +def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: + """Produce a GraphQL Value AST given a Python value. + + A GraphQL type must be provided, which will be used to interpret different + Python values. 
+ + | JSON Value | GraphQL Value | + | ------------- | -------------------- | + | Object | Input Object | + | Array | List | + | Boolean | Boolean | + | String | String / Enum Value | + | Number | Int / Float | + | Mixed | Enum Value | + | null | NullValue | + + """ + if is_non_null_type(type_): + type_ = cast(GraphQLNonNull, type_) + ast_value = ast_from_value(value, type_.of_type) + if isinstance(ast_value, NullValueNode): + return None + return ast_value + + # only explicit None, not INVALID or NaN + if value is None: + return NullValueNode() + + # INVALID or NaN + if is_invalid(value): + return None + + # Convert Python list to GraphQL list. If the GraphQLType is a list, but + # the value is not a list, convert the value using the list's item type. + if is_list_type(type_): + type_ = cast(GraphQLList, type_) + item_type = type_.of_type + if isinstance(value, Iterable) and not isinstance(value, str): + value_nodes = [ + ast_from_value(item, item_type) # type: ignore + for item in value] + return ListValueNode(values=value_nodes) + return ast_from_value(value, item_type) # type: ignore + + # Populate the fields of the input object by creating ASTs from each value + # in the Python dict according to the fields in the input type. + if is_input_object_type(type_): + if value is None or not isinstance(value, Mapping): + return None + type_ = cast(GraphQLInputObjectType, type_) + field_nodes: List[ObjectFieldNode] = [] + append_node = field_nodes.append + for field_name, field in type_.fields.items(): + if field_name in value: + field_value = ast_from_value(value[field_name], field.type) + if field_value: + append_node(ObjectFieldNode( + name=NameNode(value=field_name), value=field_value)) + return ObjectValueNode(fields=field_nodes) + + if is_scalar_type(type_) or is_enum_type(type_): + # Since value is an internally represented value, it must be serialized + # to an externally represented value before converting into an AST. + serialized = type_.serialize(value) # type: ignore + if is_nullish(serialized): + return None + + # Others serialize based on their corresponding Python scalar types. + if isinstance(serialized, bool): + return BooleanValueNode(value=serialized) + + # Python ints and floats correspond nicely to Int and Float values. + if isinstance(serialized, int): + return IntValueNode(value=f'{serialized:d}') + if isinstance(serialized, float): + return FloatValueNode(value=f'{serialized:g}') + + if isinstance(serialized, str): + # Enum types use Enum literals. + if is_enum_type(type_): + return EnumValueNode(value=serialized) + + # ID types can use Int literals. 
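+            # Illustrative sketch (assuming ``print_ast`` is exported by
+            # ``graphql.language``, with ``ast_from_value`` and ``GraphQLID``
+            # as imported in this module):
+            #
+            #     >>> from graphql.language import print_ast
+            #     >>> print_ast(ast_from_value('4', GraphQLID))
+            #     '4'
+            #     >>> print_ast(ast_from_value('abc', GraphQLID))
+            #     '"abc"'
+            #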
+ if type_ is GraphQLID and _re_integer_string.match(serialized): + return IntValueNode(value=serialized) + + return StringValueNode(value=serialized) + + raise TypeError(f'Cannot convert value to AST: {serialized!r}') + + raise TypeError(f'Unknown type: {type_!r}.') diff --git a/graphql/utilities/build_ast_schema.py b/graphql/utilities/build_ast_schema.py new file mode 100644 index 00000000..994b62b0 --- /dev/null +++ b/graphql/utilities/build_ast_schema.py @@ -0,0 +1,381 @@ +from typing import ( + Any, Callable, Dict, List, NoReturn, Optional, Union, cast) + +from ..language import ( + DirectiveDefinitionNode, DirectiveLocation, DocumentNode, + EnumTypeDefinitionNode, EnumValueDefinitionNode, FieldDefinitionNode, + InputObjectTypeDefinitionNode, InputValueDefinitionNode, + InterfaceTypeDefinitionNode, ListTypeNode, NamedTypeNode, NonNullTypeNode, + ObjectTypeDefinitionNode, OperationType, ScalarTypeDefinitionNode, + SchemaDefinitionNode, Source, TypeDefinitionNode, TypeNode, + UnionTypeDefinitionNode, parse, Node) +from ..type import ( + GraphQLArgument, GraphQLDeprecatedDirective, GraphQLDirective, + GraphQLEnumType, GraphQLEnumValue, GraphQLField, GraphQLIncludeDirective, + GraphQLInputType, GraphQLInputField, GraphQLInputObjectType, + GraphQLInterfaceType, GraphQLList, GraphQLNamedType, GraphQLNonNull, + GraphQLNullableType, GraphQLObjectType, GraphQLOutputType, + GraphQLScalarType, GraphQLSchema, GraphQLSkipDirective, GraphQLType, + GraphQLUnionType, introspection_types, specified_scalar_types) +from .value_from_ast import value_from_ast + +TypeDefinitionsMap = Dict[str, TypeDefinitionNode] +TypeResolver = Callable[[NamedTypeNode], GraphQLNamedType] + +__all__ = [ + 'build_ast_schema', 'build_schema', 'get_description', + 'ASTDefinitionBuilder'] + + +def build_ast_schema(ast: DocumentNode, assume_valid: bool=False): + """Build a GraphQL Schema from a given AST. + + This takes the ast of a schema document produced by the parse function in + src/language/parser.py. + + If no schema definition is provided, then it will look for types named + Query and Mutation. + + Given that AST it constructs a GraphQLSchema. The resulting schema + has no resolve methods, so execution will use default resolvers. + + When building a schema from a GraphQL service's introspection result, it + might be safe to assume the schema is valid. Set `assume_valid` to True + to assume the produced schema is valid. 
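+
+    A minimal illustrative example (the SDL used here is made up)::
+
+        schema = build_ast_schema(parse('''
+            type Query {
+                hello: String
+            }
+        '''))
+        # schema.query_type.fields['hello'].type is now GraphQLString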
+ """ + if not isinstance(ast, DocumentNode): + raise TypeError('Must provide a Document AST.') + + schema_def: Optional[SchemaDefinitionNode] = None + type_defs: List[TypeDefinitionNode] = [] + append_type_def = type_defs.append + node_map: TypeDefinitionsMap = {} + directive_defs: List[DirectiveDefinitionNode] = [] + append_directive_def = directive_defs.append + type_definition_nodes = ( + ScalarTypeDefinitionNode, + ObjectTypeDefinitionNode, + InterfaceTypeDefinitionNode, + EnumTypeDefinitionNode, + UnionTypeDefinitionNode, + InputObjectTypeDefinitionNode) + for d in ast.definitions: + if isinstance(d, SchemaDefinitionNode): + if schema_def: + raise TypeError('Must provide only one schema definition.') + schema_def = d + elif isinstance(d, type_definition_nodes): + d = cast(TypeDefinitionNode, d) + type_name = d.name.value + if type_name in node_map: + raise TypeError( + f"Type '{type_name}' was defined more than once.") + append_type_def(d) + node_map[type_name] = d + elif isinstance(d, DirectiveDefinitionNode): + append_directive_def(d) + + if schema_def: + operation_types: Dict[OperationType, Any] = get_operation_types( + schema_def, node_map) + else: + operation_types = { + OperationType.QUERY: node_map.get('Query'), + OperationType.MUTATION: node_map.get('Mutation'), + OperationType.SUBSCRIPTION: node_map.get('Subscription')} + + def resolve_type(type_ref: NamedTypeNode): + raise TypeError( + f"Type {type_ref.name.value!r} not found in document.") + + definition_builder = ASTDefinitionBuilder( + node_map, assume_valid=assume_valid, resolve_type=resolve_type) + + directives = [definition_builder.build_directive(directive_def) + for directive_def in directive_defs] + + # If specified directives were not explicitly declared, add them. + if not any(directive.name == 'skip' for directive in directives): + directives.append(GraphQLSkipDirective) + if not any(directive.name == 'include' for directive in directives): + directives.append(GraphQLIncludeDirective) + if not any(directive.name == 'deprecated' for directive in directives): + directives.append(GraphQLDeprecatedDirective) + + # Note: While this could make early assertions to get the correctly + # typed values below, that would throw immediately while type system + # validation with validate_schema will produce more actionable results. 
+ query_type = operation_types.get(OperationType.QUERY) + mutation_type = operation_types.get(OperationType.MUTATION) + subscription_type = operation_types.get(OperationType.SUBSCRIPTION) + return GraphQLSchema( + query=cast(GraphQLObjectType, + definition_builder.build_type(query_type), + ) if query_type else None, + mutation=cast(GraphQLObjectType, + definition_builder.build_type(mutation_type) + ) if mutation_type else None, + subscription=cast(GraphQLObjectType, + definition_builder.build_type(subscription_type) + ) if subscription_type else None, + types=[definition_builder.build_type(node) for node in type_defs], + directives=directives, + ast_node=schema_def, assume_valid=assume_valid) + + +def get_operation_types( + schema: SchemaDefinitionNode, + node_map: TypeDefinitionsMap) -> Dict[OperationType, NamedTypeNode]: + op_types: Dict[OperationType, NamedTypeNode] = {} + for operation_type in schema.operation_types: + type_name = operation_type.type.name.value + operation = operation_type.operation + if operation in op_types: + raise TypeError( + f'Must provide only one {operation.value} type in schema.') + if type_name not in node_map: + raise TypeError( + f"Specified {operation.value} type '{type_name}'" + ' not found in document.') + op_types[operation] = operation_type.type + return op_types + + +def default_type_resolver(type_ref: NamedTypeNode) -> NoReturn: + """Type resolver that always throws an error.""" + raise TypeError(f"Type '{type_ref.name.value}' not found in document.") + + +class ASTDefinitionBuilder: + + def __init__(self, type_definitions_map: TypeDefinitionsMap, + assume_valid: bool=False, + resolve_type: TypeResolver=default_type_resolver) -> None: + self._type_definitions_map = type_definitions_map + self._assume_valid = assume_valid + self._resolve_type = resolve_type + # Initialize to the GraphQL built in scalars and introspection types. 
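+        # (so that, for example, ``self._cache['String']`` is GraphQLString
+        # and ``self._cache['__Schema']`` is the introspection schema type)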
+ self._cache: Dict[str, GraphQLNamedType] = { + **specified_scalar_types, **introspection_types} + + def build_type(self, node: Union[NamedTypeNode, TypeDefinitionNode] + ) -> GraphQLNamedType: + type_name = node.name.value + cache = self._cache + if type_name not in cache: + if isinstance(node, NamedTypeNode): + def_node = self._type_definitions_map.get(type_name) + cache[type_name] = self._make_schema_def( + def_node) if def_node else self._resolve_type(node) + else: + cache[type_name] = self._make_schema_def(node) + return cache[type_name] + + def _build_wrapped_type(self, type_node: TypeNode) -> GraphQLType: + if isinstance(type_node, ListTypeNode): + return GraphQLList(self._build_wrapped_type(type_node.type)) + if isinstance(type_node, NonNullTypeNode): + return GraphQLNonNull( + # Note: GraphQLNonNull constructor validates this type + cast(GraphQLNullableType, + self._build_wrapped_type(type_node.type))) + return self.build_type(cast(NamedTypeNode, type_node)) + + def build_directive( + self, directive_node: DirectiveDefinitionNode) -> GraphQLDirective: + return GraphQLDirective( + name=directive_node.name.value, + description=directive_node.description.value + if directive_node.description else None, + locations=[DirectiveLocation[node.value] + for node in directive_node.locations], + args=self._make_args(directive_node.arguments) + if directive_node.arguments else None, + ast_node=directive_node) + + def build_field(self, field: FieldDefinitionNode) -> GraphQLField: + # Note: While this could make assertions to get the correctly typed + # value, that would throw immediately while type system validation + # with validate_schema() will produce more actionable results. + type_ = self._build_wrapped_type(field.type) + type_ = cast(GraphQLOutputType, type_) + return GraphQLField( + type_=type_, + description=field.description.value if field.description else None, + args=self._make_args(field.arguments) + if field.arguments else None, + deprecation_reason=get_deprecation_reason(field), + ast_node=field) + + def build_input_field( + self, value: InputValueDefinitionNode) -> GraphQLInputField: + # Note: While this could make assertions to get the correctly typed + # value, that would throw immediately while type system validation + # with validate_schema() will produce more actionable results. 
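+        # value_from_ast() below converts the default value literal from the
+        # AST, if one is present, into a Python value for the wrapped input type.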
+ type_ = self._build_wrapped_type(value.type) + type_ = cast(GraphQLInputType, type_) + return GraphQLInputField( + type_=type_, + description=value.description.value if value.description else None, + default_value=value_from_ast(value.default_value, type_), + ast_node=value) + + @staticmethod + def build_enum_value(value: EnumValueDefinitionNode) -> GraphQLEnumValue: + return GraphQLEnumValue( + description=value.description.value if value.description else None, + deprecation_reason=get_deprecation_reason(value), + ast_node=value) + + def _make_schema_def( + self, type_def: TypeDefinitionNode) -> GraphQLNamedType: + method = { + 'object_type_definition': self._make_type_def, + 'interface_type_definition': self._make_interface_def, + 'enum_type_definition': self._make_enum_def, + 'union_type_definition': self._make_union_def, + 'scalar_type_definition': self._make_scalar_def, + 'input_object_type_definition': self._make_input_object_def + }.get(type_def.kind) + if not method: + raise TypeError(f"Type kind '{type_def.kind}' not supported.") + return method(type_def) # type: ignore + + def _make_type_def( + self, type_def: ObjectTypeDefinitionNode) -> GraphQLObjectType: + interfaces = type_def.interfaces + return GraphQLObjectType( + name=type_def.name.value, + description=type_def.description.value + if type_def.description else None, + fields=lambda: self._make_field_def_map(type_def), + # While this could make early assertions to get the correctly typed + # values, that would throw immediately while type system validation + # with validate_schema will produce more actionable results. + interfaces=(lambda: [ + self.build_type(ref) for ref in interfaces]) # type: ignore + if interfaces else [], + ast_node=type_def) + + def _make_field_def_map(self, type_def: Union[ + ObjectTypeDefinitionNode, InterfaceTypeDefinitionNode] + ) -> Dict[str, GraphQLField]: + fields = type_def.fields + return {field.name.value: self.build_field(field) + for field in fields} if fields else {} + + def _make_arg( + self, value_node: InputValueDefinitionNode) -> GraphQLArgument: + # Note: While this could make assertions to get the correctly typed + # value, that would throw immediately while type system validation + # with validate_schema will produce more actionable results. 
+ type_ = self._build_wrapped_type(value_node.type) + type_ = cast(GraphQLInputType, type_) + return GraphQLArgument( + type_=type_, + description=value_node.description.value + if value_node.description else None, + default_value=value_from_ast(value_node.default_value, type_), + ast_node=value_node) + + def _make_args( + self, values: List[InputValueDefinitionNode] + ) -> Dict[str, GraphQLArgument]: + return {value.name.value: self._make_arg(value) + for value in values} + + def _make_input_fields( + self, values: List[InputValueDefinitionNode] + ) -> Dict[str, GraphQLInputField]: + return {value.name.value: self.build_input_field(value) + for value in values} + + def _make_interface_def( + self, type_def: InterfaceTypeDefinitionNode + ) -> GraphQLInterfaceType: + return GraphQLInterfaceType( + name=type_def.name.value, + description=type_def.description.value + if type_def.description else None, + fields=lambda: self._make_field_def_map(type_def), + ast_node=type_def) + + def _make_enum_def( + self, type_def: EnumTypeDefinitionNode) -> GraphQLEnumType: + return GraphQLEnumType( + name=type_def.name.value, + description=type_def.description.value + if type_def.description else None, + values=self._make_value_def_map(type_def), + ast_node=type_def) + + def _make_value_def_map( + self, type_def: EnumTypeDefinitionNode + ) -> Dict[str, GraphQLEnumValue]: + return {value.name.value: self.build_enum_value(value) + for value in type_def.values} if type_def.values else {} + + def _make_union_def( + self, type_def: UnionTypeDefinitionNode + ) -> GraphQLUnionType: + types = type_def.types + return GraphQLUnionType( + name=type_def.name.value, + description=type_def.description.value + if type_def.description else None, + # Note: While this could make assertions to get the correctly typed + # values below, that would throw immediately while type system + # validation with validate_schema will get more actionable results. 
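+            # The lambda defers resolution of the member types, so a union may
+            # reference types that are defined later in the same document.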
+ types=(lambda: [ + self.build_type(ref) for ref in types]) # type: ignore + if types else [], + ast_node=type_def) + + @staticmethod + def _make_scalar_def( + type_def: ScalarTypeDefinitionNode) -> GraphQLScalarType: + return GraphQLScalarType( + name=type_def.name.value, + description=type_def.description.value + if type_def.description else None, + ast_node=type_def, + serialize=lambda value: value) + + def _make_input_object_def( + self, type_def: InputObjectTypeDefinitionNode + ) -> GraphQLInputObjectType: + return GraphQLInputObjectType( + name=type_def.name.value, + description=type_def.description.value + if type_def.description else None, + fields=(lambda: self._make_input_fields( + cast(List[InputValueDefinitionNode], type_def.fields))) + if type_def.fields else cast(Dict[str, GraphQLInputField], {}), + ast_node=type_def) + + +def get_deprecation_reason(node: Union[ + EnumValueDefinitionNode, FieldDefinitionNode]) -> Optional[str]: + """Given a field or enum value node, get deprecation reason as string.""" + from ..execution import get_directive_values + deprecated = get_directive_values(GraphQLDeprecatedDirective, node) + return deprecated['reason'] if deprecated else None + + +def get_description(node: Node) -> Optional[str]: + """@deprecated: Given an ast node, returns its string description.""" + try: + # noinspection PyUnresolvedReferences + return node.description.value # type: ignore + except AttributeError: + return None + + +def build_schema(source: Union[str, Source], + assume_valid=False, no_location=False, + experimental_fragment_variables=False) -> GraphQLSchema: + """Build a GraphQLSchema directly from a source document.""" + return build_ast_schema(parse( + source, no_location=no_location, + experimental_fragment_variables=experimental_fragment_variables), + assume_valid=assume_valid) diff --git a/graphql/utilities/build_client_schema.py b/graphql/utilities/build_client_schema.py new file mode 100644 index 00000000..c320e199 --- /dev/null +++ b/graphql/utilities/build_client_schema.py @@ -0,0 +1,274 @@ +from typing import cast, Callable, Dict, Sequence + +from ..error import INVALID +from ..language import DirectiveLocation, parse_value +from ..type import ( + GraphQLArgument, GraphQLDirective, GraphQLEnumType, GraphQLEnumValue, + GraphQLField, GraphQLInputField, GraphQLInputObjectType, GraphQLInputType, + GraphQLInterfaceType, GraphQLList, GraphQLNamedType, GraphQLNonNull, + GraphQLObjectType, GraphQLOutputType, GraphQLScalarType, GraphQLSchema, + GraphQLType, GraphQLUnionType, TypeKind, assert_interface_type, + assert_nullable_type, assert_object_type, introspection_types, + is_input_type, is_output_type, specified_scalar_types) +from .value_from_ast import value_from_ast + +__all__ = ['build_client_schema'] + + +def build_client_schema( + introspection: Dict, assume_valid: bool=False) -> GraphQLSchema: + """Build a GraphQLSchema for use by client tools. + + Given the result of a client running the introspection query, creates and + returns a GraphQLSchema instance which can be then used with all + GraphQL-core-next tools, but cannot be used to execute a query, as + introspection does not represent the "resolver", "parse" or "serialize" + functions or any other server-internal mechanisms. + + This function expects a complete introspection result. Don't forget to + check the "errors" field of a server response before calling this function. + """ + # Get the schema from the introspection result. 
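+    # The `introspection` argument is the data part of an execution result for
+    # the introspection query, i.e. a dict that contains a '__schema' key.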
+ schema_introspection = introspection['__schema'] + + # Converts the list of types into a dict based on the type names. + type_introspection_map: Dict[str, Dict] = { + type_['name']: type_ for type_ in schema_introspection['types']} + + # A cache to use to store the actual GraphQLType definition objects by + # name. Initialize to the GraphQL built in scalars. All functions below are + # inline so that this type def cache is within the scope of the closure. + type_def_cache: Dict[str, GraphQLNamedType] = { + **specified_scalar_types, **introspection_types} + + # Given a type reference in introspection, return the GraphQLType instance. + # preferring cached instances before building new instances. + def get_type(type_ref: Dict) -> GraphQLType: + kind = type_ref.get('kind') + if kind == TypeKind.LIST.name: + item_ref = type_ref.get('ofType') + if not item_ref: + raise TypeError( + 'Decorated type deeper than introspection query.') + return GraphQLList(get_type(item_ref)) + elif kind == TypeKind.NON_NULL.name: + nullable_ref = type_ref.get('ofType') + if not nullable_ref: + raise TypeError( + 'Decorated type deeper than introspection query.') + nullable_type = get_type(nullable_ref) + return GraphQLNonNull(assert_nullable_type(nullable_type)) + name = type_ref.get('name') + if not name: + raise TypeError(f'Unknown type reference: {type_ref!r}') + return get_named_type(name) + + def get_named_type(type_name: str) -> GraphQLNamedType: + cached_type = type_def_cache.get(type_name) + if cached_type: + return cached_type + type_introspection = type_introspection_map.get(type_name) + if not type_introspection: + raise TypeError( + f'Invalid or incomplete schema, unknown type: {type_name}.' + ' Ensure that a full introspection query is used in order' + ' to build a client schema.') + type_def = build_type(type_introspection) + type_def_cache[type_name] = type_def + return type_def + + def get_input_type(type_ref: Dict) -> GraphQLInputType: + input_type = get_type(type_ref) + if not is_input_type(input_type): + raise TypeError( + 'Introspection must provide input type for arguments.') + return cast(GraphQLInputType, input_type) + + def get_output_type(type_ref: Dict) -> GraphQLOutputType: + output_type = get_type(type_ref) + if not is_output_type(output_type): + raise TypeError( + 'Introspection must provide output type for fields.') + return cast(GraphQLOutputType, output_type) + + def get_object_type(type_ref: Dict) -> GraphQLObjectType: + object_type = get_type(type_ref) + return assert_object_type(object_type) + + def get_interface_type(type_ref: Dict) -> GraphQLInterfaceType: + interface_type = get_type(type_ref) + return assert_interface_type(interface_type) + + # Given a type's introspection result, construct the correct + # GraphQLType instance. + def build_type(type_: Dict) -> GraphQLNamedType: + if type_ and 'name' in type_ and 'kind' in type_: + builder = type_builders.get(cast(str, type_['kind'])) + if builder: + return cast(GraphQLNamedType, builder(type_)) + raise TypeError( + 'Invalid or incomplete introspection result.' 
+ ' Ensure that a full introspection query is used in order' + f' to build a client schema: {type_!r}') + + def build_scalar_def(scalar_introspection: Dict) -> GraphQLScalarType: + return GraphQLScalarType( + name=scalar_introspection['name'], + description=scalar_introspection.get('description'), + serialize=lambda value: value) + + def build_object_def(object_introspection: Dict) -> GraphQLObjectType: + interfaces = object_introspection.get('interfaces') + if interfaces is None: + raise TypeError( + 'Introspection result missing interfaces:' + f' {object_introspection!r}') + return GraphQLObjectType( + name=object_introspection['name'], + description=object_introspection.get('description'), + interfaces=[ + get_interface_type(interface) for interface in interfaces], + fields=lambda: build_field_def_map(object_introspection)) + + def build_interface_def( + interface_introspection: Dict) -> GraphQLInterfaceType: + return GraphQLInterfaceType( + name=interface_introspection['name'], + description=interface_introspection.get('description'), + fields=lambda: build_field_def_map(interface_introspection)) + + def build_union_def(union_introspection: Dict) -> GraphQLUnionType: + possible_types = union_introspection.get('possibleTypes') + if possible_types is None: + raise TypeError( + 'Introspection result missing possibleTypes:' + f' {union_introspection!r}') + return GraphQLUnionType( + name=union_introspection['name'], + description=union_introspection.get('description'), + types=[get_object_type(type_) for type_ in possible_types]) + + def build_enum_def(enum_introspection: Dict) -> GraphQLEnumType: + if enum_introspection.get('enumValues') is None: + raise TypeError( + 'Introspection result missing enumValues:' + f' {enum_introspection!r}') + return GraphQLEnumType( + name=enum_introspection['name'], + description=enum_introspection.get('description'), + values={value_introspect['name']: GraphQLEnumValue( + description=value_introspect.get('description'), + deprecation_reason=value_introspect.get('deprecationReason')) + for value_introspect in enum_introspection['enumValues']}) + + def build_input_object_def( + input_object_introspection: Dict) -> GraphQLInputObjectType: + if input_object_introspection.get('inputFields') is None: + raise TypeError( + 'Introspection result missing inputFields:' + f' {input_object_introspection!r}') + return GraphQLInputObjectType( + name=input_object_introspection['name'], + description=input_object_introspection.get('description'), + fields=lambda: build_input_value_def_map( + input_object_introspection['inputFields'])) + + type_builders: Dict[str, Callable[[Dict], GraphQLType]] = { + TypeKind.SCALAR.name: build_scalar_def, + TypeKind.OBJECT.name: build_object_def, + TypeKind.INTERFACE.name: build_interface_def, + TypeKind.UNION.name: build_union_def, + TypeKind.ENUM.name: build_enum_def, + TypeKind.INPUT_OBJECT.name: build_input_object_def} + + def build_field(field_introspection: Dict) -> GraphQLField: + if field_introspection.get('args') is None: + raise TypeError( + 'Introspection result missing field args:' + f' {field_introspection!r}') + return GraphQLField( + get_output_type(field_introspection['type']), + args=build_arg_value_def_map(field_introspection['args']), + description=field_introspection.get('description'), + deprecation_reason=field_introspection.get('deprecationReason')) + + def build_field_def_map( + type_introspection: Dict) -> Dict[str, GraphQLField]: + if type_introspection.get('fields') is None: + raise TypeError( + 'Introspection 
result missing fields:' + f' {type_introspection!r}') + return {field_introspection['name']: build_field(field_introspection) + for field_introspection in type_introspection['fields']} + + def build_arg_value( + arg_introspection: Dict) -> GraphQLArgument: + type_ = get_input_type(arg_introspection['type']) + default_value = arg_introspection.get('defaultValue') + default_value = INVALID if default_value is None else value_from_ast( + parse_value(default_value), type_) + return GraphQLArgument( + type_, default_value=default_value, + description=arg_introspection.get('description')) + + def build_arg_value_def_map( + arg_introspections: Dict) -> Dict[str, GraphQLArgument]: + return {input_value_introspection['name']: + build_arg_value(input_value_introspection) + for input_value_introspection in arg_introspections} + + def build_input_value( + input_value_introspection: Dict) -> GraphQLInputField: + type_ = get_input_type(input_value_introspection['type']) + default_value = input_value_introspection.get('defaultValue') + default_value = INVALID if default_value is None else value_from_ast( + parse_value(default_value), type_) + return GraphQLInputField( + type_, default_value=default_value, + description=input_value_introspection.get('description')) + + def build_input_value_def_map( + input_value_introspections: Dict) -> Dict[str, GraphQLInputField]: + return {input_value_introspection['name']: + build_input_value(input_value_introspection) + for input_value_introspection in input_value_introspections} + + def build_directive(directive_introspection: Dict) -> GraphQLDirective: + if directive_introspection.get('args') is None: + raise TypeError( + 'Introspection result missing directive args:' + f' {directive_introspection!r}') + return GraphQLDirective( + name=directive_introspection['name'], + description=directive_introspection.get('description'), + locations=list(cast(Sequence[DirectiveLocation], + directive_introspection.get('locations'))), + args=build_arg_value_def_map(directive_introspection['args'])) + + # Iterate through all types, getting the type definition for each, ensuring + # that any type not directly referenced by a field will get created. + types = [get_named_type(name) for name in type_introspection_map] + + # Get the root Query, Mutation, and Subscription types. + + query_type_ref = schema_introspection.get('queryType') + query_type = get_object_type(query_type_ref) if query_type_ref else None + mutation_type_ref = schema_introspection.get('mutationType') + mutation_type = get_object_type( + mutation_type_ref) if mutation_type_ref else None + subscription_type_ref = schema_introspection.get('subscriptionType') + subscription_type = get_object_type( + subscription_type_ref) if subscription_type_ref else None + + # Get the directives supported by Introspection, assuming empty-set if + # directives were not queried for. 
+ directive_introspections = schema_introspection.get('directives') + directives = [build_directive(directive_introspection) + for directive_introspection in directive_introspections + ] if directive_introspections else [] + + return GraphQLSchema( + query=query_type, mutation=mutation_type, + subscription=subscription_type, + types=types, directives=directives, + assume_valid=assume_valid) diff --git a/graphql/utilities/coerce_value.py b/graphql/utilities/coerce_value.py new file mode 100644 index 00000000..fe3e6b88 --- /dev/null +++ b/graphql/utilities/coerce_value.py @@ -0,0 +1,178 @@ +from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Union, cast + +from ..error import GraphQLError, INVALID +from ..language import Node +from ..pyutils import is_invalid, or_list, suggestion_list +from ..type import ( + GraphQLEnumType, GraphQLInputObjectType, GraphQLInputType, + GraphQLList, GraphQLScalarType, is_enum_type, is_input_object_type, + is_list_type, is_non_null_type, is_scalar_type, GraphQLNonNull) + +__all__ = ['coerce_value', 'CoercedValue'] + + +class CoercedValue(NamedTuple): + errors: Optional[List[GraphQLError]] + value: Any + + +class Path(NamedTuple): + prev: Any # Optional['Path'] (python/mypy/issues/731) + key: Union[str, int] + + +def coerce_value(value: Any, type_: GraphQLInputType, blame_node: Node=None, + path: Path=None) -> CoercedValue: + """Coerce a Python value given a GraphQL Type. + + Returns either a value which is valid for the provided type or a list of + encountered coercion errors. + """ + # A value must be provided if the type is non-null. + if is_non_null_type(type_): + if value is None or value is INVALID: + return of_errors([coercion_error( + f'Expected non-nullable type {type_} not to be null', + blame_node, path)]) + type_ = cast(GraphQLNonNull, type_) + return coerce_value(value, type_.of_type, blame_node, path) + + if value is None or value is INVALID: + # Explicitly return the value null. + return of_value(None) + + if is_scalar_type(type_): + # Scalars determine if a value is valid via parse_value(), which can + # throw to indicate failure. If it throws, maintain a reference to + # the original error. + type_ = cast(GraphQLScalarType, type_) + try: + parse_result = type_.parse_value(value) + if is_invalid(parse_result): + return of_errors([ + coercion_error( + f'Expected type {type_.name}', blame_node, path)]) + return of_value(parse_result) + except (TypeError, ValueError) as error: + return of_errors([ + coercion_error(f'Expected type {type_.name}', blame_node, + path, str(error), error)]) + + if is_enum_type(type_): + type_ = cast(GraphQLEnumType, type_) + values = type_.values + if isinstance(value, str): + enum_value = values.get(value) + if enum_value: + return of_value( + value if enum_value.value is None else enum_value.value) + suggestions = suggestion_list(str(value), values) + did_you_mean = (f'did you mean {or_list(suggestions)}?' 
+                        if suggestions else None)
+        return of_errors([coercion_error(
+            f'Expected type {type_.name}', blame_node, path, did_you_mean)])
+
+    if is_list_type(type_):
+        type_ = cast(GraphQLList, type_)
+        item_type = type_.of_type
+        if isinstance(value, Iterable) and not isinstance(value, str):
+            errors = None
+            coerced_value_list: List[Any] = []
+            append_item = coerced_value_list.append
+            for index, item_value in enumerate(value):
+                coerced_item = coerce_value(
+                    item_value, item_type, blame_node, at_path(path, index))
+                if coerced_item.errors:
+                    errors = add(errors, *coerced_item.errors)
+                elif not errors:
+                    append_item(coerced_item.value)
+            return of_errors(errors) if errors else of_value(
+                coerced_value_list)
+        # Lists accept a non-list value as a list of one.
+        coerced_item = coerce_value(value, item_type, blame_node)
+        return coerced_item if coerced_item.errors else of_value(
+            [coerced_item.value])
+
+    if is_input_object_type(type_):
+        type_ = cast(GraphQLInputObjectType, type_)
+        if not isinstance(value, dict):
+            return of_errors([coercion_error(
+                f'Expected type {type_.name} to be a dict', blame_node, path)])
+        errors = None
+        coerced_value_dict: Dict[str, Any] = {}
+        fields = type_.fields
+
+        # Ensure every defined field is valid.
+        for field_name, field in fields.items():
+            field_value = value.get(field_name, INVALID)
+            if is_invalid(field_value):
+                if not is_invalid(field.default_value):
+                    coerced_value_dict[field_name] = field.default_value
+                elif is_non_null_type(field.type):
+                    errors = add(errors, coercion_error(
+                        f'Field {print_path(at_path(path, field_name))}'
+                        f' of required type {field.type} was not provided',
+                        blame_node))
+            else:
+                coerced_field = coerce_value(
+                    field_value, field.type, blame_node,
+                    at_path(path, field_name))
+                if coerced_field.errors:
+                    errors = add(errors, *coerced_field.errors)
+                else:
+                    coerced_value_dict[field_name] = coerced_field.value
+
+        # Ensure every provided field is defined.
+        for field_name in value:
+            if field_name not in fields:
+                suggestions = suggestion_list(field_name, fields)
+                did_you_mean = (f'did you mean {or_list(suggestions)}?'
+                                if suggestions else None)
+                errors = add(errors, coercion_error(
+                    f"Field '{field_name}'"
+                    f" is not defined by type {type_.name}",
+                    blame_node, path, did_you_mean))
+
+        return of_errors(errors) if errors else of_value(coerced_value_dict)
+
+    raise TypeError(f'Unexpected type: {type_}.')
+
+
+def of_value(value: Any) -> CoercedValue:
+    return CoercedValue(None, value)
+
+
+def of_errors(errors: List[GraphQLError]) -> CoercedValue:
+    return CoercedValue(errors, INVALID)
+
+
+def add(errors: Optional[List[GraphQLError]],
+        *more_errors: GraphQLError) -> List[GraphQLError]:
+    return (errors or []) + list(more_errors)
+
+
+def at_path(prev: Optional[Path], key: Union[str, int]) -> Path:
+    return Path(prev, key)
+
+
+def coercion_error(message: str, blame_node: Node=None,
+                   path: Path=None, sub_message: str=None,
+                   original_error: Exception=None) -> GraphQLError:
+    """Return a GraphQLError instance."""
+    if path:
+        path_str = print_path(path)
+        message += f' at {path_str}'
+    message += f'; {sub_message}' if sub_message else '.'
+ # noinspection PyArgumentEqualDefault + return GraphQLError(message, blame_node, None, None, None, original_error) + + +def print_path(path: Path) -> str: + """Build string describing the path into the value where error was found""" + path_str = '' + current_path: Optional[Path] = path + while current_path: + path_str = (f'.{current_path.key}' if isinstance(current_path.key, str) + else f'[{current_path.key}]') + path_str + current_path = current_path.prev + return f'value{path_str}' if path_str else '' diff --git a/graphql/utilities/concat_ast.py b/graphql/utilities/concat_ast.py new file mode 100644 index 00000000..8400f068 --- /dev/null +++ b/graphql/utilities/concat_ast.py @@ -0,0 +1,17 @@ +from typing import Sequence +from itertools import chain + +from ..language.ast import DocumentNode + +__all__ = ['concat_ast'] + + +def concat_ast(asts: Sequence[DocumentNode]) -> DocumentNode: + """Concat ASTs. + + Provided a collection of ASTs, presumably each from different files, + concatenate the ASTs together into batched AST, useful for validating many + GraphQL source files which together represent one conceptual application. + """ + return DocumentNode(definitions=list(chain.from_iterable( + document.definitions for document in asts))) diff --git a/graphql/utilities/extend_schema.py b/graphql/utilities/extend_schema.py new file mode 100644 index 00000000..8b0df158 --- /dev/null +++ b/graphql/utilities/extend_schema.py @@ -0,0 +1,491 @@ +from collections import defaultdict +from functools import partial +from itertools import chain +from typing import ( + Any, Callable, Dict, List, Optional, Union, Tuple, cast) + +from ..error import GraphQLError +from ..language import ( + DirectiveDefinitionNode, DocumentNode, + EnumTypeDefinitionNode, EnumTypeExtensionNode, + InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode, + InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, + ObjectTypeDefinitionNode, ObjectTypeExtensionNode, OperationType, + ScalarTypeDefinitionNode, ScalarTypeExtensionNode, + SchemaExtensionNode, SchemaDefinitionNode, + UnionTypeDefinitionNode, UnionTypeExtensionNode, + NamedTypeNode, TypeExtensionNode) +from ..type import ( + GraphQLArgument, GraphQLArgumentMap, GraphQLDirective, + GraphQLEnumType, GraphQLEnumValue, GraphQLEnumValueMap, + GraphQLField, GraphQLFieldMap, GraphQLInputField, GraphQLInputFieldMap, + GraphQLInputObjectType, GraphQLInputType, GraphQLInterfaceType, + GraphQLList, GraphQLNamedType, GraphQLNonNull, GraphQLObjectType, + GraphQLScalarType, GraphQLSchema, GraphQLType, GraphQLUnionType, + is_enum_type, is_input_object_type, is_interface_type, is_list_type, + is_non_null_type, is_object_type, is_scalar_type, is_schema, is_union_type, + is_introspection_type, is_specified_scalar_type) +from .build_ast_schema import ASTDefinitionBuilder + +__all__ = ['extend_schema'] + + +def extend_schema(schema: GraphQLSchema, document_ast: DocumentNode, + assume_valid=False) -> GraphQLSchema: + """Extend the schema with extensions from a given document. + + Produces a new schema given an existing schema and a document which may + contain GraphQL type extensions and definitions. The original schema will + remain unaltered. + + Because a schema represents a graph of references, a schema cannot be + extended without effectively making an entire copy. We do not know until + it's too late if subgraphs remain unchanged. + + This algorithm copies the provided schema, applying extensions while + producing the copy. The original schema remains unaltered. 
+
+    When extending a schema with a known valid extension, it might be safe to
+    assume the schema is valid. Set `assume_valid` to true to assume the
+    produced schema is valid.
+    """
+
+    if not is_schema(schema):
+        raise TypeError('Must provide valid GraphQLSchema')
+
+    if not isinstance(document_ast, DocumentNode):
+        raise TypeError('Must provide valid Document AST')
+
+    # Collect the type definitions and extensions found in the document.
+    type_definition_map: Dict[str, Any] = {}
+    type_extensions_map: Dict[str, Any] = defaultdict(list)
+
+    # New directives and types are separate because a directive and a type can
+    # have the same name. For example, a type named "skip".
+    directive_definitions: List[DirectiveDefinitionNode] = []
+
+    # Schema extensions are collected which may add additional operation types.
+    schema_extensions: List[SchemaExtensionNode] = []
+
+    for def_ in document_ast.definitions:
+        if isinstance(def_, SchemaDefinitionNode):
+            # Sanity check that a schema extension is not defining a new schema
+            raise GraphQLError(
+                'Cannot define a new schema within a schema extension.',
+                [def_])
+        elif isinstance(def_, SchemaExtensionNode):
+            schema_extensions.append(def_)
+        elif isinstance(def_, (
+                ObjectTypeDefinitionNode,
+                InterfaceTypeDefinitionNode,
+                EnumTypeDefinitionNode,
+                UnionTypeDefinitionNode,
+                ScalarTypeDefinitionNode,
+                InputObjectTypeDefinitionNode)):
+            # Sanity check that none of the defined types conflict with the
+            # schema's existing types.
+            type_name = def_.name.value
+            if schema.get_type(type_name):
+                raise GraphQLError(
+                    f"Type '{type_name}' already exists in the schema."
+                    ' It cannot also be defined in this type definition.',
+                    [def_])
+            type_definition_map[type_name] = def_
+        elif isinstance(def_, (
+                ScalarTypeExtensionNode,
+                ObjectTypeExtensionNode,
+                InterfaceTypeExtensionNode,
+                EnumTypeExtensionNode,
+                InputObjectTypeExtensionNode,
+                UnionTypeExtensionNode)):
+            # Sanity check that this type extension exists within the
+            # schema's existing types.
+            extended_type_name = def_.name.value
+            existing_type = schema.get_type(extended_type_name)
+            if not existing_type:
+                raise GraphQLError(
+                    f"Cannot extend type '{extended_type_name}'"
+                    ' because it does not exist in the existing schema.',
+                    [def_])
+            check_extension_node(existing_type, def_)
+            type_extensions_map[extended_type_name].append(def_)
+        elif isinstance(def_, DirectiveDefinitionNode):
+            directive_name = def_.name.value
+            existing_directive = schema.get_directive(directive_name)
+            if existing_directive:
+                raise GraphQLError(
+                    f"Directive '{directive_name}' already exists"
+                    ' in the schema. It cannot be redefined.', [def_])
+            directive_definitions.append(def_)
+
+    # If this document contains no new types, extensions, or directives then
+    # return the same unmodified GraphQLSchema instance.
+    if (not type_extensions_map and not type_definition_map
+            and not directive_definitions and not schema_extensions):
+        return schema
+
+    # Below are functions used for producing this schema that have closed over
+    # this scope and have access to the schema, cache, and newly defined types.
+ + def get_merged_directives() -> List[GraphQLDirective]: + if not schema.directives: + raise TypeError('schema must have default directives') + + return list(chain( + map(extend_directive, schema.directives), + map(ast_builder.build_directive, directive_definitions))) + + def extend_maybe_named_type( + type_: Optional[GraphQLNamedType]) -> Optional[GraphQLNamedType]: + return extend_named_type(type_) if type_ else None + + def extend_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: + if is_introspection_type(type_) or is_specified_scalar_type(type_): + # Builtin types are not extended. + return type_ + + name = type_.name + if name not in extend_type_cache: + if is_scalar_type(type_): + type_ = cast(GraphQLScalarType, type_) + extend_type_cache[name] = extend_scalar_type(type_) + elif is_object_type(type_): + type_ = cast(GraphQLObjectType, type_) + extend_type_cache[name] = extend_object_type(type_) + elif is_interface_type(type_): + type_ = cast(GraphQLInterfaceType, type_) + extend_type_cache[name] = extend_interface_type(type_) + elif is_enum_type(type_): + type_ = cast(GraphQLEnumType, type_) + extend_type_cache[name] = extend_enum_type(type_) + elif is_input_object_type(type_): + type_ = cast(GraphQLInputObjectType, type_) + extend_type_cache[name] = extend_input_object_type(type_) + elif is_union_type(type_): + type_ = cast(GraphQLUnionType, type_) + extend_type_cache[name] = extend_union_type(type_) + + return extend_type_cache[name] + + def extend_directive(directive: GraphQLDirective) -> GraphQLDirective: + return GraphQLDirective( + directive.name, + description=directive.description, + locations=directive.locations, + args=extend_args(directive.args), + ast_node=directive.ast_node) + + def extend_input_object_type( + type_: GraphQLInputObjectType) -> GraphQLInputObjectType: + name = type_.name + extension_ast_nodes = ( + list(type_.extension_ast_nodes) + type_extensions_map[name] + if type_.extension_ast_nodes else type_extensions_map[name] + ) if name in type_extensions_map else type_.extension_ast_nodes + return GraphQLInputObjectType( + name, + description=type_.description, + fields=lambda: extend_input_field_map(type_), + ast_node=type_.ast_node, + extension_ast_nodes=extension_ast_nodes) + + def extend_input_field_map( + type_: GraphQLInputObjectType) -> GraphQLInputFieldMap: + old_field_map = type_.fields + new_field_map = {field_name: GraphQLInputField( + cast(GraphQLInputType, extend_type(field.type)), + description=field.description, + default_value=field.default_value, + ast_node=field.ast_node) + for field_name, field in old_field_map.items()} + + # If there are any extensions to the fields, apply those here. + extensions = type_extensions_map.get(type_.name) + if extensions: + for extension in extensions: + for field in extension.fields: + field_name = field.name.value + if field_name in old_field_map: + raise GraphQLError( + f"Field '{type_.name}.{field_name}' already" + ' exists in the schema. 
It cannot also be defined' + ' in this type extension.', [field]) + new_field_map[field_name] = ast_builder.build_input_field( + field) + + return new_field_map + + def extend_enum_type(type_: GraphQLEnumType) -> GraphQLEnumType: + name = type_.name + extension_ast_nodes = ( + list(type_.extension_ast_nodes) + type_extensions_map[name] + if type_.extension_ast_nodes else type_extensions_map[name] + ) if name in type_extensions_map else type_.extension_ast_nodes + return GraphQLEnumType( + name, + description=type_.description, + values=extend_value_map(type_), + ast_node=type_.ast_node, + extension_ast_nodes=extension_ast_nodes) + + def extend_value_map(type_: GraphQLEnumType) -> GraphQLEnumValueMap: + old_value_map = type_.values + new_value_map = {value_name: GraphQLEnumValue( + value.value, + description=value.description, + deprecation_reason=value.deprecation_reason, + ast_node=value.ast_node) + for value_name, value in old_value_map.items()} + + # If there are any extensions to the values, apply those here. + extensions = type_extensions_map.get(type_.name) + if extensions: + for extension in extensions: + for value in extension.values: + value_name = value.name.value + if value_name in old_value_map: + raise GraphQLError( + f"Enum value '{type_.name}.{value_name}' already" + ' exists in the schema. It cannot also be defined' + ' in this type extension.', [value]) + new_value_map[value_name] = ast_builder.build_enum_value( + value) + + return new_value_map + + def extend_scalar_type(type_: GraphQLScalarType) -> GraphQLScalarType: + name = type_.name + extension_ast_nodes = ( + list(type_.extension_ast_nodes) + type_extensions_map[name] + if type_.extension_ast_nodes else type_extensions_map[name] + ) if name in type_extensions_map else type_.extension_ast_nodes + return GraphQLScalarType( + name, + serialize=type_.serialize, + description=type_.description, + parse_value=type_.parse_value, + parse_literal=type_.parse_literal, + ast_node=type_.ast_node, + extension_ast_nodes=extension_ast_nodes) + + def extend_object_type(type_: GraphQLObjectType) -> GraphQLObjectType: + name = type_.name + extension_ast_nodes = type_.extension_ast_nodes + try: + extensions = type_extensions_map[name] + except KeyError: + pass + else: + if extension_ast_nodes: + extension_ast_nodes = list( + extension_ast_nodes) + extensions + else: + extension_ast_nodes = extensions + return GraphQLObjectType( + type_.name, + description=type_.description, + interfaces=partial(extend_implemented_interfaces, type_), + fields=partial(extend_field_map, type_), + ast_node=type_.ast_node, + extension_ast_nodes=extension_ast_nodes, + is_type_of=type_.is_type_of) + + def extend_args(args: GraphQLArgumentMap) -> GraphQLArgumentMap: + return {arg_name: GraphQLArgument( + cast(GraphQLInputType, extend_type(arg.type)), + default_value=arg.default_value, + description=arg.description, + ast_node=arg.ast_node) + for arg_name, arg in args.items()} + + def extend_interface_type( + type_: GraphQLInterfaceType) -> GraphQLInterfaceType: + name = type_.name + extension_ast_nodes = type_.extension_ast_nodes + try: + extensions = type_extensions_map[name] + except KeyError: + pass + else: + if extension_ast_nodes: + extension_ast_nodes = list( + extension_ast_nodes) + extensions + else: + extension_ast_nodes = extensions + return GraphQLInterfaceType( + type_.name, + description=type_.description, + fields=partial(extend_field_map, type_), + ast_node=type_.ast_node, + extension_ast_nodes=extension_ast_nodes, + 
resolve_type=type_.resolve_type) + + def extend_union_type(type_: GraphQLUnionType) -> GraphQLUnionType: + name = type_.name + extension_ast_nodes = ( + list(type_.extension_ast_nodes) + type_extensions_map[name] + if type_.extension_ast_nodes else type_extensions_map[name] + ) if name in type_extensions_map else type_.extension_ast_nodes + return GraphQLUnionType( + name, + description=type_.description, + types=lambda: extend_possible_types(type_), + ast_node=type_.ast_node, + resolve_type=type_.resolve_type, + extension_ast_nodes=extension_ast_nodes) + + def extend_possible_types( + type_: GraphQLUnionType) -> List[GraphQLObjectType]: + possible_types = list(map(extend_named_type, type_.types)) + + # If there are any extensions to the union, apply those here. + extensions = type_extensions_map.get(type_.name) + if extensions: + for extension in extensions: + for named_type in extension.types: + # Note: While this could make early assertions to get the + # correctly typed values, that would throw immediately + # while type system validation with validate_schema() will + # produce more actionable results. + possible_types.append(ast_builder.build_type(named_type)) + + return cast(List[GraphQLObjectType], possible_types) + + def extend_implemented_interfaces( + type_: GraphQLObjectType) -> List[GraphQLInterfaceType]: + interfaces: List[GraphQLInterfaceType] = list( + map(cast(Callable[[GraphQLNamedType], GraphQLInterfaceType], + extend_named_type), type_.interfaces)) + + # If there are any extensions to the interfaces, apply those here. + for extension in type_extensions_map[type_.name]: + for named_type in extension.interfaces: + # Note: While this could make early assertions to get the + # correctly typed values, that would throw immediately while + # type system validation with validate_schema() will produce + # more actionable results. + interfaces.append( + cast(GraphQLInterfaceType, build_type(named_type))) + + return interfaces + + def extend_field_map( + type_: Union[GraphQLObjectType, GraphQLInterfaceType] + ) -> GraphQLFieldMap: + old_field_map = type_.fields + new_field_map = {field_name: GraphQLField( + cast(GraphQLObjectType, extend_type(field.type)), + description=field.description, + deprecation_reason=field.deprecation_reason, + args=extend_args(field.args), + ast_node=field.ast_node, + resolve=field.resolve) + for field_name, field in old_field_map.items()} + + # If there are any extensions to the fields, apply those here. + for extension in type_extensions_map[type_.name]: + for field in extension.fields: + field_name = field.name.value + if field_name in old_field_map: + raise GraphQLError( + f"Field '{type_.name}.{field_name}'" + ' already exists in the schema.' + ' It cannot also be defined in this type extension.', + [field]) + new_field_map[field_name] = build_field(field) + + return new_field_map + + # noinspection PyTypeChecker,PyUnresolvedReferences + def extend_type(type_def: GraphQLType) -> GraphQLType: + if is_list_type(type_def): + return GraphQLList(extend_type(type_def.of_type)) # type: ignore + if is_non_null_type(type_def): + return GraphQLNonNull( # type: ignore + extend_type(type_def.of_type)) # type: ignore + return extend_named_type(type_def) # type: ignore + + # noinspection PyShadowingNames + def resolve_type(type_ref: NamedTypeNode) -> GraphQLNamedType: + type_name = type_ref.name.value + existing_type = schema.get_type(type_name) + if existing_type: + return extend_named_type(existing_type) + raise GraphQLError( + f"Unknown type: '{type_name}'." 
+ ' Ensure that this type exists either in the original schema,' + ' or is added in a type definition.', [type_ref]) + + ast_builder = ASTDefinitionBuilder( + type_definition_map, + assume_valid=assume_valid, resolve_type=resolve_type) + build_field = ast_builder.build_field + build_type = ast_builder.build_type + + extend_type_cache: Dict[str, GraphQLNamedType] = {} + + # Get the extended root operation types. + operation_types = { + OperationType.QUERY: extend_maybe_named_type(schema.query_type), + OperationType.MUTATION: extend_maybe_named_type(schema.mutation_type), + OperationType.SUBSCRIPTION: + extend_maybe_named_type(schema.subscription_type)} + + # Then, incorporate all schema extensions. + for schema_extension in schema_extensions: + if schema_extension.operation_types: + for operation_type in schema_extension.operation_types: + operation = operation_type.operation + if operation_types[operation]: + raise TypeError(f'Must provide only one {operation.value}' + ' type in schema.') + type_ref = operation_type.type + # Note: While this could make early assertions to get the + # correctly typed values, that would throw immediately while + # type system validation with validate_schema() will produce + # more actionable results + operation_types[operation] = ast_builder.build_type(type_ref) + + schema_extension_ast_nodes = ( + schema.extension_ast_nodes or cast(Tuple[SchemaExtensionNode], ()) + ) + tuple(schema_extensions) + + # Iterate through all types, getting the type definition for each, ensuring + # that any type not directly referenced by a value will get created. + types = list(map(extend_named_type, schema.type_map.values())) + # do the same with new types + types.extend(ast_builder.build_type(type_) + for type_ in type_definition_map.values()) + + # Then produce and return a Schema with these types. 
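+    # The new schema reuses the original schema's ast_node and records the
+    # combined schema extension nodes collected above on the result.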
+ return GraphQLSchema( # type: ignore + query=operation_types[OperationType.QUERY], + mutation=operation_types[OperationType.MUTATION], + subscription=operation_types[OperationType.SUBSCRIPTION], + types=types, + directives=get_merged_directives(), + ast_node=schema.ast_node, + extension_ast_nodes=schema_extension_ast_nodes) + + +def check_extension_node(type_: GraphQLNamedType, node: TypeExtensionNode): + if isinstance(node, ObjectTypeExtensionNode): + if not is_object_type(type_): + raise GraphQLError( + f"Cannot extend non-object type '{type_.name}'.", [node]) + elif isinstance(node, InterfaceTypeExtensionNode): + if not is_interface_type(type_): + raise GraphQLError( + f"Cannot extend non-interface type '{type_.name}'.", [node]) + elif isinstance(node, EnumTypeExtensionNode): + if not is_enum_type(type_): + raise GraphQLError( + f"Cannot extend non-enum type '{type_.name}'.", [node]) + elif isinstance(node, UnionTypeExtensionNode): + if not is_union_type(type_): + raise GraphQLError( + f"Cannot extend non-union type '{type_.name}'.", [node]) + elif isinstance(node, InputObjectTypeExtensionNode): + if not is_input_object_type(type_): + raise GraphQLError( + f"Cannot extend non-input object type '{type_.name}'.", [node]) diff --git a/graphql/utilities/find_breaking_changes.py b/graphql/utilities/find_breaking_changes.py new file mode 100644 index 00000000..d4dbd1ef --- /dev/null +++ b/graphql/utilities/find_breaking_changes.py @@ -0,0 +1,695 @@ +from enum import Enum +from typing import Dict, List, NamedTuple, Union, cast + +from ..error import INVALID +from ..language import DirectiveLocation +from ..type import ( + GraphQLArgument, GraphQLDirective, GraphQLEnumType, GraphQLInputObjectType, + GraphQLInterfaceType, GraphQLList, GraphQLNamedType, GraphQLNonNull, + GraphQLObjectType, GraphQLSchema, GraphQLType, GraphQLUnionType, + is_enum_type, is_input_object_type, is_interface_type, is_list_type, + is_named_type, is_non_null_type, is_object_type, is_scalar_type, + is_union_type) + +__all__ = [ + 'BreakingChange', 'BreakingChangeType', + 'DangerousChange', 'DangerousChangeType', + 'find_breaking_changes', 'find_dangerous_changes', + 'find_removed_types', 'find_types_that_changed_kind', + 'find_fields_that_changed_type_on_object_or_interface_types', + 'find_fields_that_changed_type_on_input_object_types', + 'find_types_removed_from_unions', 'find_values_removed_from_enums', + 'find_arg_changes', 'find_interfaces_removed_from_object_types', + 'find_removed_directives', 'find_removed_directive_args', + 'find_added_non_null_directive_args', + 'find_removed_locations_for_directive', + 'find_removed_directive_locations', 'find_values_added_to_enums', + 'find_interfaces_added_to_object_types', 'find_types_added_to_unions'] + + +class BreakingChangeType(Enum): + FIELD_CHANGED_KIND = 10 + FIELD_REMOVED = 11 + TYPE_CHANGED_KIND = 20 + TYPE_REMOVED = 21 + TYPE_REMOVED_FROM_UNION = 22 + VALUE_REMOVED_FROM_ENUM = 30 + ARG_REMOVED = 40 + ARG_CHANGED_KIND = 41 + NON_NULL_ARG_ADDED = 50 + NON_NULL_INPUT_FIELD_ADDED = 51 + INTERFACE_REMOVED_FROM_OBJECT = 60 + DIRECTIVE_REMOVED = 70 + DIRECTIVE_ARG_REMOVED = 71 + DIRECTIVE_LOCATION_REMOVED = 72 + NON_NULL_DIRECTIVE_ARG_ADDED = 73 + + +class DangerousChangeType(Enum): + ARG_DEFAULT_VALUE_CHANGE = 42 + VALUE_ADDED_TO_ENUM = 31 + INTERFACE_ADDED_TO_OBJECT = 61 + TYPE_ADDED_TO_UNION = 23 + NULLABLE_INPUT_FIELD_ADDED = 52 + NULLABLE_ARG_ADDED = 53 + + +class BreakingChange(NamedTuple): + type: BreakingChangeType + description: str + + +class 
DangerousChange(NamedTuple): + type: DangerousChangeType + description: str + + +class BreakingAndDangerousChanges(NamedTuple): + breaking_changes: List[BreakingChange] + dangerous_changes: List[DangerousChange] + + +def find_breaking_changes( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + """Find breaking changes. + + Given two schemas, returns a list containing descriptions of all the + types of breaking changes covered by the other functions down below. + """ + return ( + find_removed_types(old_schema, new_schema) + + find_types_that_changed_kind(old_schema, new_schema) + + find_fields_that_changed_type_on_object_or_interface_types( + old_schema, new_schema) + + find_fields_that_changed_type_on_input_object_types( + old_schema, new_schema).breaking_changes + + find_types_removed_from_unions(old_schema, new_schema) + + find_values_removed_from_enums(old_schema, new_schema) + + find_arg_changes(old_schema, new_schema).breaking_changes + + find_interfaces_removed_from_object_types(old_schema, new_schema) + + find_removed_directives(old_schema, new_schema) + + find_removed_directive_args(old_schema, new_schema) + + find_added_non_null_directive_args(old_schema, new_schema) + + find_removed_directive_locations(old_schema, new_schema)) + + +def find_dangerous_changes( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[DangerousChange]: + """Find dangerous changes. + + Given two schemas, returns a list containing descriptions of all the types + of potentially dangerous changes covered by the other functions down below. + """ + return ( + find_arg_changes(old_schema, new_schema).dangerous_changes + + find_values_added_to_enums(old_schema, new_schema) + + find_interfaces_added_to_object_types(old_schema, new_schema) + + find_types_added_to_unions(old_schema, new_schema) + + find_fields_that_changed_type_on_input_object_types( + old_schema, new_schema).dangerous_changes) + + +def find_removed_types( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + """Find removed types. + + Given two schemas, returns a list containing descriptions of any breaking + changes in the newSchema related to removing an entire type. + """ + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + breaking_changes = [] + for type_name in old_type_map: + if type_name not in new_type_map: + breaking_changes.append(BreakingChange( + BreakingChangeType.TYPE_REMOVED, f'{type_name} was removed.')) + return breaking_changes + + +def find_types_that_changed_kind( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + """Find types that changed kind + + Given two schemas, returns a list containing descriptions of any breaking + changes in the newSchema related to changing the type of a type. + """ + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + breaking_changes = [] + for type_name in old_type_map: + if type_name not in new_type_map: + continue + old_type = old_type_map[type_name] + new_type = new_type_map[type_name] + if old_type.__class__ is not new_type.__class__: + breaking_changes.append(BreakingChange( + BreakingChangeType.TYPE_CHANGED_KIND, + f'{type_name} changed from {type_kind_name(old_type)}' + f' to {type_kind_name(new_type)}.')) + return breaking_changes + + +def find_arg_changes( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> BreakingAndDangerousChanges: + """Find argument changes. 
+ + Given two schemas, returns a list containing descriptions of any + breaking or dangerous changes in the new_schema related to arguments + (such as removal or change of type of an argument, or a change in an + argument's default value). + """ + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + breaking_changes: List[BreakingChange] = [] + dangerous_changes: List[DangerousChange] = [] + + for type_name, old_type in old_type_map.items(): + new_type = new_type_map.get(type_name) + if (not (is_object_type(old_type) or is_interface_type(old_type)) or + not (is_object_type(new_type) or is_interface_type(new_type)) or + new_type.__class__ is not old_type.__class__): + continue + old_type = cast( + Union[GraphQLObjectType, GraphQLInterfaceType], old_type) + new_type = cast( + Union[GraphQLObjectType, GraphQLInterfaceType], new_type) + + old_type_fields = old_type.fields + new_type_fields = new_type.fields + for field_name in old_type_fields: + if field_name not in new_type_fields: + continue + + old_args = old_type_fields[field_name].args + new_args = new_type_fields[field_name].args + for arg_name, old_arg in old_args.items(): + new_arg = new_args.get(arg_name) + if not new_arg: + # Arg not present + breaking_changes.append(BreakingChange( + BreakingChangeType.ARG_REMOVED, + f'{old_type.name}.{field_name} arg' + f' {arg_name} was removed')) + continue + is_safe = is_change_safe_for_input_object_field_or_field_arg( + old_arg.type, new_arg.type) + if not is_safe: + breaking_changes.append(BreakingChange( + BreakingChangeType.ARG_CHANGED_KIND, + f'{old_type.name}.{field_name} arg' + f' {arg_name} has changed type from' + f' {old_arg.type} to {new_arg.type}')) + elif (old_arg.default_value is not INVALID and + old_arg.default_value != new_arg.default_value): + dangerous_changes.append(DangerousChange( + DangerousChangeType.ARG_DEFAULT_VALUE_CHANGE, + f'{old_type.name}.{field_name} arg' + f' {arg_name} has changed defaultValue')) + + # Check if a non-null arg was added to the field + for arg_name in new_args: + if arg_name not in old_args: + new_arg = new_args[arg_name] + if is_non_null_type(new_arg.type): + breaking_changes.append(BreakingChange( + BreakingChangeType.NON_NULL_ARG_ADDED, + f'A non-null arg {arg_name} on' + f' {new_type.name}.{field_name} was added')) + else: + dangerous_changes.append(DangerousChange( + DangerousChangeType.NULLABLE_ARG_ADDED, + f'A nullable arg {arg_name} on' + f' {new_type.name}.{field_name} was added')) + + return BreakingAndDangerousChanges(breaking_changes, dangerous_changes) + + +def type_kind_name(type_: GraphQLNamedType) -> str: + if is_scalar_type(type_): + return 'a Scalar type' + if is_object_type(type_): + return 'an Object type' + if is_interface_type(type_): + return 'an Interface type' + if is_union_type(type_): + return 'a Union type' + if is_enum_type(type_): + return 'an Enum type' + if is_input_object_type(type_): + return 'an Input type' + raise TypeError(f'Unknown type {type_.__class__.__name__}') + + +def find_fields_that_changed_type_on_object_or_interface_types( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + breaking_changes = [] + for type_name, old_type in old_type_map.items(): + new_type = new_type_map.get(type_name) + if (not (is_object_type(old_type) or is_interface_type(old_type)) or + not (is_object_type(new_type) or is_interface_type(new_type)) or + new_type.__class__ is not old_type.__class__): 
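+            # Only compare fields when both the old and the new type are object
+            # or interface types of the same kind; type removals and kind
+            # changes are reported by the other find_* functions.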
+ continue + old_type = cast( + Union[GraphQLObjectType, GraphQLInterfaceType], old_type) + new_type = cast( + Union[GraphQLObjectType, GraphQLInterfaceType], new_type) + + old_type_fields_def = old_type.fields + new_type_fields_def = new_type.fields + for field_name in old_type_fields_def: + # Check if the field is missing on the type in the new schema. + if field_name not in new_type_fields_def: + breaking_changes.append(BreakingChange( + BreakingChangeType.FIELD_REMOVED, + f'{type_name}.{field_name} was removed.')) + else: + old_field_type = old_type_fields_def[field_name].type + new_field_type = new_type_fields_def[field_name].type + is_safe = is_change_safe_for_object_or_interface_field( + old_field_type, new_field_type) + if not is_safe: + old_field_type_string = ( + old_field_type.name if is_named_type(old_field_type) + else str(old_field_type)) + new_field_type_string = ( + new_field_type.name if is_named_type(new_field_type) + else str(new_field_type)) + breaking_changes.append(BreakingChange( + BreakingChangeType.FIELD_CHANGED_KIND, + f'{type_name}.{field_name} changed type' + f' from {old_field_type_string}' + f' to {new_field_type_string}.')) + + return breaking_changes + + +def find_fields_that_changed_type_on_input_object_types( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> BreakingAndDangerousChanges: + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + breaking_changes = [] + dangerous_changes = [] + for type_name, old_type in old_type_map.items(): + new_type = new_type_map.get(type_name) + if not (is_input_object_type(old_type) and + is_input_object_type(new_type)): + continue + old_type = cast(GraphQLInputObjectType, old_type) + new_type = cast(GraphQLInputObjectType, new_type) + + old_type_fields_def = old_type.fields + new_type_fields_def = new_type.fields + for field_name in old_type_fields_def: + # Check if the field is missing on the type in the new schema. 
+ if field_name not in new_type_fields_def: + breaking_changes.append(BreakingChange( + BreakingChangeType.FIELD_REMOVED, + f'{type_name}.{field_name} was removed.')) + else: + old_field_type = old_type_fields_def[field_name].type + new_field_type = new_type_fields_def[field_name].type + + is_safe = is_change_safe_for_input_object_field_or_field_arg( + old_field_type, new_field_type) + if not is_safe: + old_field_type_string = ( + cast(GraphQLNamedType, old_field_type).name + if is_named_type(old_field_type) + else str(old_field_type)) + new_field_type_string = ( + cast(GraphQLNamedType, new_field_type).name + if is_named_type(new_field_type) + else str(new_field_type)) + breaking_changes.append(BreakingChange( + BreakingChangeType.FIELD_CHANGED_KIND, + f'{type_name}.{field_name} changed type' + f' from {old_field_type_string}' + f' to {new_field_type_string}.')) + + # Check if a field was added to the input object type + for field_name in new_type_fields_def: + if field_name not in old_type_fields_def: + if is_non_null_type(new_type_fields_def[field_name].type): + breaking_changes.append(BreakingChange( + BreakingChangeType.NON_NULL_INPUT_FIELD_ADDED, + f'A non-null field {field_name} on' + f' input type {new_type.name} was added.')) + else: + dangerous_changes.append(DangerousChange( + DangerousChangeType.NULLABLE_INPUT_FIELD_ADDED, + f'A nullable field {field_name} on' + f' input type {new_type.name} was added.')) + + return BreakingAndDangerousChanges(breaking_changes, dangerous_changes) + + +def is_change_safe_for_object_or_interface_field( + old_type: GraphQLType, new_type: GraphQLType) -> bool: + if is_named_type(old_type): + return ( + # if they're both named types, see if their names are equivalent + (is_named_type(new_type) and + cast(GraphQLNamedType, old_type).name == + cast(GraphQLNamedType, new_type).name) or + # moving from nullable to non-null of same underlying type is safe + (is_non_null_type(new_type) and + is_change_safe_for_object_or_interface_field( + old_type, cast(GraphQLNonNull, new_type).of_type))) + elif is_list_type(old_type): + return ( + # if they're both lists, make sure underlying types are compatible + (is_list_type(new_type) and + is_change_safe_for_object_or_interface_field( + cast(GraphQLList, old_type).of_type, + cast(GraphQLList, new_type).of_type)) or + # moving from nullable to non-null of same underlying type is safe + (is_non_null_type(new_type) and + is_change_safe_for_object_or_interface_field( + old_type, cast(GraphQLNonNull, new_type).of_type))) + elif is_non_null_type(old_type): + # if they're both non-null, make sure underlying types are compatible + return ( + is_non_null_type(new_type) and + is_change_safe_for_object_or_interface_field( + cast(GraphQLNonNull, old_type).of_type, + cast(GraphQLNonNull, new_type).of_type)) + else: + return False + + +def is_change_safe_for_input_object_field_or_field_arg( + old_type: GraphQLType, new_type: GraphQLType) -> bool: + if is_named_type(old_type): + # if they're both named types, see if their names are equivalent + return ( + is_named_type(new_type) and + cast(GraphQLNamedType, old_type).name == + cast(GraphQLNamedType, new_type).name) + elif is_list_type(old_type): + # if they're both lists, make sure underlying types are compatible + return ( + is_list_type(new_type) and + is_change_safe_for_input_object_field_or_field_arg( + cast(GraphQLList, old_type).of_type, + cast(GraphQLList, new_type).of_type)) + elif is_non_null_type(old_type): + return ( + # if they're both non-null, + # make sure the 
underlying types are compatible + (is_non_null_type(new_type) and + is_change_safe_for_input_object_field_or_field_arg( + cast(GraphQLNonNull, old_type).of_type, + cast(GraphQLNonNull, new_type).of_type)) or + # moving from non-null to nullable of same underlying type is safe + (not is_non_null_type(new_type) and + is_change_safe_for_input_object_field_or_field_arg( + cast(GraphQLNonNull, old_type).of_type, new_type))) + else: + return False + + +def find_types_removed_from_unions( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + """Find types removed from unions. + + Given two schemas, returns a list containing descriptions of any breaking + changes in the new_schema related to removing types from a union type. + """ + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + types_removed_from_union = [] + for old_type_name, old_type in old_type_map.items(): + new_type = new_type_map.get(old_type_name) + if not (is_union_type(old_type) and is_union_type(new_type)): + continue + old_type = cast(GraphQLUnionType, old_type) + new_type = cast(GraphQLUnionType, new_type) + type_names_in_new_union = {type_.name for type_ in new_type.types} + for type_ in old_type.types: + type_name = type_.name + if type_name not in type_names_in_new_union: + types_removed_from_union.append(BreakingChange( + BreakingChangeType.TYPE_REMOVED_FROM_UNION, + f'{type_name} was removed' + f' from union type {old_type_name}.')) + return types_removed_from_union + + +def find_types_added_to_unions( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[DangerousChange]: + """Find types added to union. + + Given two schemas, returns a list containing descriptions of any dangerous + changes in the new_schema related to adding types to a union type. + """ + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + types_added_to_union = [] + for new_type_name, new_type in new_type_map.items(): + old_type = old_type_map.get(new_type_name) + if not (is_union_type(old_type) and is_union_type(new_type)): + continue + old_type = cast(GraphQLUnionType, old_type) + new_type = cast(GraphQLUnionType, new_type) + type_names_in_old_union = {type_.name for type_ in old_type.types} + for type_ in new_type.types: + type_name = type_.name + if type_name not in type_names_in_old_union: + types_added_to_union.append(DangerousChange( + DangerousChangeType.TYPE_ADDED_TO_UNION, + f'{type_name} was added to union type {new_type_name}.')) + return types_added_to_union + + +def find_values_removed_from_enums( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + """Find values removed from enums. + + Given two schemas, returns a list containing descriptions of any breaking + changes in the new_schema related to removing values from an enum type. 
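The two safety predicates differ in direction: output positions may become stricter, input positions may become more lenient. A small sketch, importing the wrapper types from graphql.type:

    from graphql.type import GraphQLNonNull, GraphQLString
    from graphql.utilities.find_breaking_changes import (
        is_change_safe_for_input_object_field_or_field_arg,
        is_change_safe_for_object_or_interface_field)

    # Output positions: nullable -> non-null is safe, the reverse is not.
    is_change_safe_for_object_or_interface_field(
        GraphQLString, GraphQLNonNull(GraphQLString))        # True
    is_change_safe_for_object_or_interface_field(
        GraphQLNonNull(GraphQLString), GraphQLString)        # False

    # Input positions: non-null -> nullable is safe, the reverse is not.
    is_change_safe_for_input_object_field_or_field_arg(
        GraphQLNonNull(GraphQLString), GraphQLString)        # True
    is_change_safe_for_input_object_field_or_field_arg(
        GraphQLString, GraphQLNonNull(GraphQLString))        # False
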
+ """ + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + values_removed_from_enums = [] + for type_name, old_type in old_type_map.items(): + new_type = new_type_map.get(type_name) + if not (is_enum_type(old_type) and is_enum_type(new_type)): + continue + old_type = cast(GraphQLEnumType, old_type) + new_type = cast(GraphQLEnumType, new_type) + values_in_new_enum = new_type.values + for value_name in old_type.values: + if value_name not in values_in_new_enum: + values_removed_from_enums.append(BreakingChange( + BreakingChangeType.VALUE_REMOVED_FROM_ENUM, + f'{value_name} was removed from enum type {type_name}.')) + return values_removed_from_enums + + +def find_values_added_to_enums( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[DangerousChange]: + """Find values added to enums. + + Given two schemas, returns a list containing descriptions of any dangerous + changes in the new_schema related to adding values to an enum type. + """ + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + + values_added_to_enums = [] + for type_name, old_type in old_type_map.items(): + new_type = new_type_map.get(type_name) + if not (is_enum_type(old_type) and is_enum_type(new_type)): + continue + old_type = cast(GraphQLEnumType, old_type) + new_type = cast(GraphQLEnumType, new_type) + values_in_old_enum = old_type.values + for value_name in new_type.values: + if value_name not in values_in_old_enum: + values_added_to_enums.append(DangerousChange( + DangerousChangeType.VALUE_ADDED_TO_ENUM, + f'{value_name} was added to enum type {type_name}.')) + return values_added_to_enums + + +def find_interfaces_removed_from_object_types( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + breaking_changes = [] + + for type_name, old_type in old_type_map.items(): + new_type = new_type_map.get(type_name) + if not (is_object_type(old_type) and is_object_type(new_type)): + continue + old_type = cast(GraphQLObjectType, old_type) + new_type = cast(GraphQLObjectType, new_type) + + old_interfaces = old_type.interfaces + new_interfaces = new_type.interfaces + for old_interface in old_interfaces: + if not any(interface.name == old_interface.name + for interface in new_interfaces): + breaking_changes.append(BreakingChange( + BreakingChangeType.INTERFACE_REMOVED_FROM_OBJECT, + f'{type_name} no longer implements interface' + f' {old_interface.name}.')) + + return breaking_changes + + +def find_interfaces_added_to_object_types( + old_schema: GraphQLSchema, new_schema: GraphQLSchema +) -> List[DangerousChange]: + old_type_map = old_schema.type_map + new_type_map = new_schema.type_map + interfaces_added_to_object_types = [] + + for type_name, new_type in new_type_map.items(): + old_type = old_type_map.get(type_name) + if not (is_object_type(old_type) and is_object_type(new_type)): + continue + old_type = cast(GraphQLObjectType, old_type) + new_type = cast(GraphQLObjectType, new_type) + + old_interfaces = old_type.interfaces + new_interfaces = new_type.interfaces + for new_interface in new_interfaces: + if not any(interface.name == new_interface.name + for interface in old_interfaces): + interfaces_added_to_object_types.append(DangerousChange( + DangerousChangeType.INTERFACE_ADDED_TO_OBJECT, + f'{new_interface.name} added to interfaces implemented' + f' by {type_name}.')) + + return interfaces_added_to_object_types + + +def find_removed_directives( + old_schema: 
GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + removed_directives = [] + + new_schema_directive_map = get_directive_map_for_schema(new_schema) + for directive in old_schema.directives: + if directive.name not in new_schema_directive_map: + removed_directives.append(BreakingChange( + BreakingChangeType.DIRECTIVE_REMOVED, + f'{directive.name} was removed')) + + return removed_directives + + +def find_removed_args_for_directive( + old_directive: GraphQLDirective, new_directive: GraphQLDirective + ) -> List[str]: + new_arg_map = new_directive.args + return [arg_name for arg_name in old_directive.args + if arg_name not in new_arg_map] + + +def find_removed_directive_args( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + removed_directive_args = [] + old_schema_directive_map = get_directive_map_for_schema(old_schema) + + for new_directive in new_schema.directives: + old_directive = old_schema_directive_map.get(new_directive.name) + if not old_directive: + continue + + for arg_name in find_removed_args_for_directive( + old_directive, new_directive): + removed_directive_args.append(BreakingChange( + BreakingChangeType.DIRECTIVE_ARG_REMOVED, + f'{arg_name} was removed from {new_directive.name}')) + + return removed_directive_args + + +def find_added_args_for_directive( + old_directive: GraphQLDirective, new_directive: GraphQLDirective + ) -> Dict[str, GraphQLArgument]: + old_arg_map = old_directive.args + return {arg_name: arg for arg_name, arg in new_directive.args.items() + if arg_name not in old_arg_map} + + +def find_added_non_null_directive_args( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + added_non_nullable_args = [] + old_schema_directive_map = get_directive_map_for_schema(old_schema) + + for new_directive in new_schema.directives: + old_directive = old_schema_directive_map.get(new_directive.name) + if not old_directive: + continue + + for arg_name, arg in find_added_args_for_directive( + old_directive, new_directive).items(): + if not is_non_null_type(arg.type): + continue + + added_non_nullable_args.append(BreakingChange( + BreakingChangeType.NON_NULL_DIRECTIVE_ARG_ADDED, + f'A non-null arg {arg_name} on directive' + f' {new_directive.name} was added')) + + return added_non_nullable_args + + +def find_removed_locations_for_directive( + old_directive: GraphQLDirective, new_directive: GraphQLDirective + ) -> List[DirectiveLocation]: + new_location_set = set(new_directive.locations) + return [old_location for old_location in old_directive.locations + if old_location not in new_location_set] + + +def find_removed_directive_locations( + old_schema: GraphQLSchema, new_schema: GraphQLSchema + ) -> List[BreakingChange]: + removed_locations = [] + old_schema_directive_map = get_directive_map_for_schema(old_schema) + + for new_directive in new_schema.directives: + old_directive = old_schema_directive_map.get(new_directive.name) + if not old_directive: + continue + + for location in find_removed_locations_for_directive( + old_directive, new_directive): + removed_locations.append(BreakingChange( + BreakingChangeType.DIRECTIVE_LOCATION_REMOVED, + f'{location.name} was removed from {new_directive.name}')) + + return removed_locations + + +def get_directive_map_for_schema( + schema: GraphQLSchema) -> Dict[str, GraphQLDirective]: + return {directive.name: directive for directive in schema.directives} diff --git a/graphql/utilities/find_deprecated_usages.py b/graphql/utilities/find_deprecated_usages.py 
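A sketch of the directive argument check defined above; it assumes build_ast_schema accepts directive definitions in SDL, and the @log directive is made up for this example:

    from graphql.language import parse
    from graphql.utilities.build_ast_schema import build_ast_schema
    from graphql.utilities.find_breaking_changes import (
        find_removed_directive_args)

    old_schema = build_ast_schema(parse('''
        directive @log(level: Int) on FIELD
        type Query { a: String }
    '''))
    new_schema = build_ast_schema(parse('''
        directive @log on FIELD
        type Query { a: String }
    '''))

    for change_type, description in find_removed_directive_args(
            old_schema, new_schema):
        print(description)   # level was removed from log
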
new file mode 100644 index 00000000..3ac08f85 --- /dev/null +++ b/graphql/utilities/find_deprecated_usages.py @@ -0,0 +1,55 @@ +from typing import List + +from ..error import GraphQLError +from ..language import DocumentNode, TypeInfoVisitor, Visitor, visit +from ..type import GraphQLSchema, get_named_type +from .type_info import TypeInfo + + +__all__ = ['find_deprecated_usages'] + + +def find_deprecated_usages( + schema: GraphQLSchema, ast: DocumentNode) -> List[GraphQLError]: + """Get a list of GraphQLError instances describing each deprecated use.""" + + type_info = TypeInfo(schema) + visitor = FindDeprecatedUsages(type_info) + visit(ast, TypeInfoVisitor(type_info, visitor)) + return visitor.errors + + +class FindDeprecatedUsages(Visitor): + """A validation rule which reports deprecated usages.""" + + type_info: TypeInfo + errors: List[GraphQLError] + + def __init__(self, type_info: TypeInfo) -> None: + super().__init__() + self.type_info = type_info + self.errors = [] + + def enter_field(self, node, *_args): + field_def = self.type_info.get_field_def() + if field_def and field_def.is_deprecated: + parent_type = self.type_info.get_parent_type() + if parent_type: + field_name = node.name.value + reason = field_def.deprecation_reason + self.errors.append(GraphQLError( + f'The field {parent_type.name}.{field_name}' + ' is deprecated.' + (f' {reason}' if reason else ''), + [node])) + + def enter_enum_value(self, node, *_args): + enum_val = self.type_info.get_enum_value() + if enum_val and enum_val.is_deprecated: + type_ = get_named_type(self.type_info.get_input_type()) + if type_: + enum_val_name = node.value + reason = enum_val.deprecation_reason + self.errors.append(GraphQLError( + f'The enum value {type_.name}.{enum_val_name}' + ' is deprecated.' + (f' {reason}' if reason else ''), + [node])) diff --git a/graphql/utilities/get_operation_ast.py b/graphql/utilities/get_operation_ast.py new file mode 100644 index 00000000..09d1f29a --- /dev/null +++ b/graphql/utilities/get_operation_ast.py @@ -0,0 +1,29 @@ +from typing import Optional + +from ..language import DocumentNode, OperationDefinitionNode + +__all__ = ['get_operation_ast'] + + +def get_operation_ast( + document_ast: DocumentNode, operation_name: Optional[str]=None + ) -> Optional[OperationDefinitionNode]: + """Get operation AST node. + + Returns an operation AST given a document AST and optionally an operation + name. If a name is not provided, an operation is only returned if only one + is provided in the document. + """ + operation = None + for definition in document_ast.definitions: + if isinstance(definition, OperationDefinitionNode): + if not operation_name: + # If no operation name was provided, only return an Operation + # if there is one defined in the document. + # Upon encountering the second, return None. 
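A usage sketch for find_deprecated_usages above; it assumes build_ast_schema picks up the @deprecated directive from SDL, and the schema and query are illustrative only:

    from graphql.language import parse
    from graphql.utilities.build_ast_schema import build_ast_schema
    from graphql.utilities.find_deprecated_usages import find_deprecated_usages

    schema = build_ast_schema(parse('''
        type Query {
          color: String @deprecated(reason: "Use hue instead.")
        }
    '''))

    errors = find_deprecated_usages(schema, parse('{ color }'))
    print(errors[0].message)
    # The field Query.color is deprecated. Use hue instead.
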
+ if operation: + return None + operation = definition + elif definition.name and definition.name.value == operation_name: + return definition + return operation diff --git a/graphql/utilities/get_operation_root_type.py b/graphql/utilities/get_operation_root_type.py new file mode 100644 index 00000000..7cb8de39 --- /dev/null +++ b/graphql/utilities/get_operation_root_type.py @@ -0,0 +1,39 @@ +from typing import Union + +from ..error import GraphQLError +from ..language import ( + OperationType, OperationDefinitionNode, OperationTypeDefinitionNode) +from ..type import GraphQLObjectType, GraphQLSchema + +__all__ = ['get_operation_root_type'] + + +def get_operation_root_type( + schema: GraphQLSchema, + operation: Union[OperationDefinitionNode, OperationTypeDefinitionNode] + ) -> GraphQLObjectType: + """Extract the root type of the operation from the schema.""" + operation_type = operation.operation + if operation_type == OperationType.QUERY: + query_type = schema.query_type + if not query_type: + raise GraphQLError( + 'Schema does not define the required query root type.', + [operation]) + return query_type + elif operation_type == OperationType.MUTATION: + mutation_type = schema.mutation_type + if not mutation_type: + raise GraphQLError( + 'Schema is not configured for mutations.', [operation]) + return mutation_type + elif operation_type == OperationType.SUBSCRIPTION: + subscription_type = schema.subscription_type + if not subscription_type: + raise GraphQLError( + 'Schema is not configured for subscriptions.', [operation]) + return subscription_type + else: + raise GraphQLError( + 'Can only have query, mutation and subscription operations.', + [operation]) diff --git a/graphql/utilities/introspection_from_schema.py b/graphql/utilities/introspection_from_schema.py new file mode 100644 index 00000000..fbc5736b --- /dev/null +++ b/graphql/utilities/introspection_from_schema.py @@ -0,0 +1,34 @@ +from typing import Any, Dict + +from ..error import GraphQLError +from ..language import parse +from ..type import GraphQLSchema +from ..utilities.introspection_query import get_introspection_query + +__all__ = ['introspection_from_schema'] + + +IntrospectionSchema = Dict[str, Any] + + +def introspection_from_schema( + schema: GraphQLSchema, + descriptions: bool=True) -> IntrospectionSchema: + """Build an IntrospectionQuery from a GraphQLSchema + + IntrospectionQuery is useful for utilities that care about type and field + relationships, but do not need to traverse through those relationships. + + This is the inverse of build_client_schema. The primary use case is outside + of the server context, for instance when doing schema comparisons. 
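To make the single-operation rule concrete, a minimal sketch of get_operation_ast (the operation names are arbitrary):

    from graphql.language import parse
    from graphql.utilities.get_operation_ast import get_operation_ast

    document = parse('''
        query Foo { a }
        mutation Bar { b }
    ''')

    print(get_operation_ast(document, 'Foo').name.value)   # Foo
    print(get_operation_ast(document))   # None, the document holds two operations
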
+ """ + query_ast = parse(get_introspection_query(descriptions)) + + from ..execution.execute import execute, ExecutionResult + result = execute(schema, query_ast) + if not isinstance(result, ExecutionResult): + raise RuntimeError('Introspection cannot be executed') + if result.errors or not result.data: + raise result.errors[0] if result.errors else GraphQLError( + 'Introspection did not return a result') + return result.data diff --git a/graphql/utilities/introspection_query.py b/graphql/utilities/introspection_query.py new file mode 100644 index 00000000..9c170223 --- /dev/null +++ b/graphql/utilities/introspection_query.py @@ -0,0 +1,100 @@ +from textwrap import dedent + +__all__ = ['get_introspection_query'] + + +def get_introspection_query(descriptions=True) -> str: + """Get a query for introspection, optionally without descriptions.""" + return dedent(f""" + query IntrospectionQuery {{ + __schema {{ + queryType {{ name }} + mutationType {{ name }} + subscriptionType {{ name }} + types {{ + ...FullType + }} + directives {{ + name + {'description' if descriptions else ''} + locations + args {{ + ...InputValue + }} + }} + }} + }} + + fragment FullType on __Type {{ + kind + name + {'description' if descriptions else ''} + fields(includeDeprecated: true) {{ + name + {'description' if descriptions else ''} + args {{ + ...InputValue + }} + type {{ + ...TypeRef + }} + isDeprecated + deprecationReason + }} + inputFields {{ + ...InputValue + }} + interfaces {{ + ...TypeRef + }} + enumValues(includeDeprecated: true) {{ + name + {'description' if descriptions else ''} + isDeprecated + deprecationReason + }} + possibleTypes {{ + ...TypeRef + }} + }} + + fragment InputValue on __InputValue {{ + name + {'description' if descriptions else ''} + type {{ ...TypeRef }} + defaultValue + }} + + fragment TypeRef on __Type {{ + kind + name + ofType {{ + kind + name + ofType {{ + kind + name + ofType {{ + kind + name + ofType {{ + kind + name + ofType {{ + kind + name + ofType {{ + kind + name + ofType {{ + kind + name + }} + }} + }} + }} + }} + }} + }} + }} + """) diff --git a/graphql/utilities/lexicographic_sort_schema.py b/graphql/utilities/lexicographic_sort_schema.py new file mode 100644 index 00000000..0accba34 --- /dev/null +++ b/graphql/utilities/lexicographic_sort_schema.py @@ -0,0 +1,142 @@ +from operator import attrgetter +from typing import Collection, Dict, List, cast + +from ..type import ( + GraphQLArgument, GraphQLDirective, GraphQLEnumType, + GraphQLEnumValue, GraphQLField, GraphQLInputField, GraphQLInputObjectType, + GraphQLInterfaceType, GraphQLList, GraphQLNamedType, GraphQLNonNull, + GraphQLObjectType, GraphQLSchema, GraphQLUnionType, + is_enum_type, is_input_object_type, is_interface_type, + is_introspection_type, is_list_type, is_non_null_type, is_object_type, + is_scalar_type, is_specified_scalar_type, is_union_type) + +__all__ = ['lexicographic_sort_schema'] + + +def lexicographic_sort_schema(schema: GraphQLSchema) -> GraphQLSchema: + """Sort GraphQLSchema.""" + + cache: Dict[str, GraphQLNamedType] = {} + + def sort_maybe_type(maybe_type): + return maybe_type and sort_named_type(maybe_type) + + def sort_directive(directive): + return GraphQLDirective( + name=directive.name, + description=directive.description, + locations=sorted(directive.locations, key=attrgetter('name')), + args=sort_args(directive.args), + ast_node=directive.ast_node) + + def sort_args(args): + return {name: GraphQLArgument( + sort_type(arg.type), + default_value=arg.default_value, + description=arg.description, 
+ ast_node=arg.ast_node) + for name, arg in sorted(args.items())} + + def sort_fields(fields_map): + return {name: GraphQLField( + sort_type(field.type), + args=sort_args(field.args), + resolve=field.resolve, + subscribe=field.subscribe, + description=field.description, + deprecation_reason=field.deprecation_reason, + ast_node=field.ast_node) + for name, field in sorted(fields_map.items())} + + def sort_input_fields(fields_map): + return {name: GraphQLInputField( + sort_type(field.type), + description=field.description, + default_value=field.default_value, + ast_node=field.ast_node) + for name, field in sorted(fields_map.items())} + + def sort_type(type_): + if is_list_type(type_): + return GraphQLList(sort_type(type_.of_type)) + elif is_non_null_type(type_): + return GraphQLNonNull(sort_type(type_.of_type)) + else: + return sort_named_type(type_) + + def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: + if is_specified_scalar_type(type_) or is_introspection_type(type_): + return type_ + + sorted_type = cache.get(type_.name) + if not sorted_type: + sorted_type = sort_named_type_impl(type_) + cache[type_.name] = sorted_type + return sorted_type + + def sort_types( + arr: Collection[GraphQLNamedType]) -> List[GraphQLNamedType]: + return [sort_named_type(type_) + for type_ in sorted(arr, key=attrgetter('name'))] + + def sort_named_type_impl(type_: GraphQLNamedType) -> GraphQLNamedType: + if is_scalar_type(type_): + return type_ + elif is_object_type(type_): + type1 = cast(GraphQLObjectType, type_) + return GraphQLObjectType( + type_.name, + interfaces=lambda: cast( + List[GraphQLInterfaceType], sort_types(type1.interfaces)), + fields=lambda: sort_fields(type1.fields), + is_type_of=type1.is_type_of, + description=type_.description, + ast_node=type1.ast_node, + extension_ast_nodes=type1.extension_ast_nodes) + elif is_interface_type(type_): + type2 = cast(GraphQLInterfaceType, type_) + return GraphQLInterfaceType( + type_.name, + fields=lambda: sort_fields(type2.fields), + resolve_type=type2.resolve_type, + description=type_.description, + ast_node=type2.ast_node, + extension_ast_nodes=type2.extension_ast_nodes) + elif is_union_type(type_): + type3 = cast(GraphQLUnionType, type_) + return GraphQLUnionType( + type_.name, + types=lambda: cast( + List[GraphQLObjectType], sort_types(type3.types)), + resolve_type=type3.resolve_type, + description=type_.description, + ast_node=type3.ast_node) + elif is_enum_type(type_): + type4 = cast(GraphQLEnumType, type_) + return GraphQLEnumType( + type_.name, + values={name: GraphQLEnumValue( + val.value, + description=val.description, + deprecation_reason=val.deprecation_reason, + ast_node=val.ast_node) + for name, val in sorted(type4.values.items())}, + description=type_.description, + ast_node=type4.ast_node) + elif is_input_object_type(type_): + type5 = cast(GraphQLInputObjectType, type_) + return GraphQLInputObjectType( + type_.name, + sort_input_fields(type5.fields), + description=type_.description, + ast_node=type5.ast_node) + raise TypeError(f"Unknown type: '{type_}'") + + return GraphQLSchema( + types=sort_types(schema.type_map.values()), + directives=[sort_directive(directive) for directive in sorted( + schema.directives, key=attrgetter('name'))], + query=sort_maybe_type(schema.query_type), + mutation=sort_maybe_type(schema.mutation_type), + subscription=sort_maybe_type(schema.subscription_type), + ast_node=schema.ast_node) diff --git a/graphql/utilities/schema_printer.py b/graphql/utilities/schema_printer.py new file mode 100644 index 
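A sketch of lexicographic_sort_schema above; the field order is observable through the rebuilt query type:

    from graphql.language import parse
    from graphql.utilities.build_ast_schema import build_ast_schema
    from graphql.utilities.lexicographic_sort_schema import (
        lexicographic_sort_schema)

    schema = build_ast_schema(parse('type Query { b: String a: String c: String }'))
    sorted_schema = lexicographic_sort_schema(schema)
    print(list(sorted_schema.query_type.fields))   # ['a', 'b', 'c']
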
00000000..fcd5e39a --- /dev/null +++ b/graphql/utilities/schema_printer.py @@ -0,0 +1,286 @@ +import re +from itertools import chain +from typing import Any, Callable, Dict, List, Optional, Union, cast + +from ..language import print_ast +from ..pyutils import is_invalid, is_nullish +from ..type import ( + DEFAULT_DEPRECATION_REASON, GraphQLArgument, + GraphQLDirective, GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLInputObjectType, GraphQLInputType, GraphQLInterfaceType, + GraphQLNamedType, GraphQLObjectType, GraphQLScalarType, GraphQLSchema, + GraphQLString, GraphQLUnionType, is_enum_type, is_input_object_type, + is_interface_type, is_introspection_type, is_object_type, is_scalar_type, + is_specified_directive, is_specified_scalar_type, is_union_type) +from .ast_from_value import ast_from_value + +__all__ = [ + 'print_schema', 'print_introspection_schema', 'print_type', 'print_value'] + + +def print_schema(schema: GraphQLSchema) -> str: + return print_filtered_schema( + schema, lambda n: not is_specified_directive(n), is_defined_type) + + +def print_introspection_schema(schema: GraphQLSchema) -> str: + return print_filtered_schema( + schema, is_specified_directive, is_introspection_type) + + +def is_defined_type(type_: GraphQLNamedType) -> bool: + return (not is_specified_scalar_type(type_) and + not is_introspection_type(type_)) + + +def print_filtered_schema( + schema: GraphQLSchema, + directive_filter: Callable[[GraphQLDirective], bool], + type_filter: Callable[[GraphQLNamedType], bool]) -> str: + directives = filter(directive_filter, schema.directives) + type_map = schema.type_map + types = filter( # type: ignore + type_filter, map(type_map.get, sorted(type_map))) + + return '\n\n'.join(chain(filter(None, [ + print_schema_definition(schema)]), + (print_directive(directive) for directive in directives), + (print_type(type_) for type_ in types))) + '\n' # type: ignore + + +def print_schema_definition(schema: GraphQLSchema) -> Optional[str]: + if is_schema_of_common_names(schema): + return None + + operation_types = [] + + query_type = schema.query_type + if query_type: + operation_types.append(f' query: {query_type.name}') + + mutation_type = schema.mutation_type + if mutation_type: + operation_types.append(f' mutation: {mutation_type.name}') + + subscription_type = schema.subscription_type + if subscription_type: + operation_types.append(f' subscription: {subscription_type.name}') + + return 'schema {\n' + '\n'.join(operation_types) + '\n}' + + +def is_schema_of_common_names(schema: GraphQLSchema) -> bool: + """Check whether this schema uses the common naming convention. + + GraphQL schema define root types for each type of operation. These types + are the same as any other type and can be named in any manner, however + there is a common naming convention: + + schema { + query: Query + mutation: Mutation + } + + When using this naming convention, the schema description can be omitted. 
+ """ + query_type = schema.query_type + if query_type and query_type.name != 'Query': + return False + + mutation_type = schema.mutation_type + if mutation_type and mutation_type.name != 'Mutation': + return False + + subscription_type = schema.subscription_type + if subscription_type and subscription_type.name != 'Subscription': + return False + + return True + + +def print_type(type_: GraphQLNamedType) -> str: + if is_scalar_type(type_): + type_ = cast(GraphQLScalarType, type_) + return print_scalar(type_) + if is_object_type(type_): + type_ = cast(GraphQLObjectType, type_) + return print_object(type_) + if is_interface_type(type_): + type_ = cast(GraphQLInterfaceType, type_) + return print_interface(type_) + if is_union_type(type_): + type_ = cast(GraphQLUnionType, type_) + return print_union(type_) + if is_enum_type(type_): + type_ = cast(GraphQLEnumType, type_) + return print_enum(type_) + if is_input_object_type(type_): + type_ = cast(GraphQLInputObjectType, type_) + return print_input_object(type_) + raise TypeError(f'Unknown type: {type_!r}') + + +def print_scalar(type_: GraphQLScalarType) -> str: + return print_description(type_) + f'scalar {type_.name}' + + +def print_object(type_: GraphQLObjectType) -> str: + interfaces = type_.interfaces + implemented_interfaces = ( + ' implements ' + ' & '.join(i.name for i in interfaces) + ) if interfaces else '' + return (print_description(type_) + + f'type {type_.name}{implemented_interfaces} ' + + '{\n' + print_fields(type_) + '\n}') + + +def print_interface(type_: GraphQLInterfaceType) -> str: + return (print_description(type_) + + f'interface {type_.name} ' + + '{\n' + print_fields(type_) + '\n}') + + +def print_union(type_: GraphQLUnionType) -> str: + return (print_description(type_) + + f'union {type_.name} = ' + ' | '.join( + t.name for t in type_.types)) + + +def print_enum(type_: GraphQLEnumType) -> str: + return (print_description(type_) + + f'enum {type_.name} ' + + '{\n' + print_enum_values(type_.values) + '\n}') + + +def print_enum_values(values: Dict[str, GraphQLEnumValue]) -> str: + return '\n'.join( + print_description(value, ' ', not i) + + f' {name}' + print_deprecated(value) + for i, (name, value) in enumerate(values.items())) + + +def print_input_object(type_: GraphQLInputObjectType) -> str: + fields = type_.fields.items() + return (print_description(type_) + + f'input {type_.name} ' + '{\n' + + '\n'.join( + print_description(field, ' ', not i) + ' ' + + print_input_value(name, field) + for i, (name, field) in enumerate(fields)) + '\n}') + + +def print_fields(type_: Union[GraphQLObjectType, GraphQLInterfaceType]) -> str: + fields = type_.fields.items() + return '\n'.join( + print_description(field, ' ', not i) + f' {name}' + + print_args(field.args, ' ') + f': {field.type}' + + print_deprecated(field) + for i, (name, field) in enumerate(fields)) + + +def print_args(args: Dict[str, GraphQLArgument], indentation='') -> str: + if not args: + return '' + + # If every arg does not have a description, print them on one line. 
+ if not any(arg.description for arg in args.values()): + return '(' + ', '.join( + print_input_value(name, arg) for name, arg in args.items()) + ')' + + return ('(\n' + '\n'.join( + print_description(arg, f' {indentation}', not i) + + f' {indentation}' + print_input_value(name, arg) + for i, (name, arg) in enumerate(args.items())) + f'\n{indentation})') + + +def print_input_value(name: str, arg: GraphQLArgument) -> str: + arg_decl = f'{name}: {arg.type}' + if not is_invalid(arg.default_value): + arg_decl += f' = {print_value(arg.default_value, arg.type)}' + return arg_decl + + +def print_directive(directive: GraphQLDirective) -> str: + return (print_description(directive) + + f'directive @{directive.name}' + + print_args(directive.args) + + ' on ' + ' | '.join( + location.name for location in directive.locations)) + + +def print_deprecated( + field_or_enum_value: Union[GraphQLField, GraphQLEnumValue]) -> str: + if not field_or_enum_value.is_deprecated: + return '' + reason = field_or_enum_value.deprecation_reason + if (is_nullish(reason) or reason == '' or + reason == DEFAULT_DEPRECATION_REASON): + return ' @deprecated' + else: + return f' @deprecated(reason: {print_value(reason, GraphQLString)})' + + +def print_description( + type_: Union[GraphQLArgument, GraphQLDirective, + GraphQLEnumValue, GraphQLNamedType], + indentation='', first_in_block=True) -> str: + if not type_.description: + return '' + lines = description_lines(type_.description, 120 - len(indentation)) + + description = [] + if indentation and not first_in_block: + description.append('\n') + description.extend([indentation, '"""']) + + if len(lines) == 1 and len(lines[0]) < 70 and not lines[0].endswith('"'): + # In some circumstances, a single line can be used for the description. + description.extend([escape_quote(lines[0]), '"""\n']) + else: + # Format a multi-line block quote to account for leading space. + has_leading_space = lines and lines[0].startswith((' ', '\t')) + if not has_leading_space: + description.append('\n') + for i, line in enumerate(lines): + if i or not has_leading_space: + description.append(indentation) + description.extend([escape_quote(line), '\n']) + description.extend([indentation, '"""\n']) + + return ''.join(description) + + +def escape_quote(line: str) -> str: + return line.replace('"""', '\\"""') + + +def description_lines(description: str, max_len: int) -> List[str]: + lines: List[str] = [] + append_line, extend_lines = lines.append, lines.extend + raw_lines = description.splitlines() + for raw_line in raw_lines: + if raw_line: + # For > 120 character long lines, cut at space boundaries into + # sublines of ~80 chars. 
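Putting the printer together, a round-trip sketch of print_schema; it assumes build_ast_schema coerces the argument's default value so that print_value can render it:

    from graphql.language import parse
    from graphql.utilities.build_ast_schema import build_ast_schema
    from graphql.utilities.schema_printer import print_schema

    schema = build_ast_schema(parse('''
        type Query {
          hello(name: String = "world"): String
        }
    '''))

    print(print_schema(schema), end='')
    # type Query {
    #   hello(name: String = "world"): String
    # }
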
+ extend_lines(break_line(raw_line, max_len)) + else: + append_line(raw_line) + return lines + + +def break_line(line: str, max_len: int) -> List[str]: + if len(line) < max_len + 5: + return [line] + parts = re.split(f'((?: |^).{{15,{max_len - 40}}}(?= |$))', line) + if len(parts) < 4: + return [line] + sublines = [parts[0] + parts[1] + parts[2]] + append_subline = sublines.append + for i in range(3, len(parts), 2): + append_subline(parts[i][1:] + parts[i + 1]) + return sublines + + +def print_value(value: Any, type_: GraphQLInputType) -> str: + """Convenience function for printing a Python value""" + return print_ast(ast_from_value(value, type_)) # type: ignore diff --git a/graphql/utilities/separate_operations.py b/graphql/utilities/separate_operations.py new file mode 100644 index 00000000..b06e40fd --- /dev/null +++ b/graphql/utilities/separate_operations.py @@ -0,0 +1,98 @@ +from collections import defaultdict +from typing import Dict, List, Set + +from ..language import ( + DocumentNode, ExecutableDefinitionNode, FragmentDefinitionNode, + OperationDefinitionNode, Visitor, visit) + +__all__ = ['separate_operations'] + + +DepGraph = Dict[str, Set[str]] + + +def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]: + """Separate operations in a given AST document. + + separate_operations accepts a single AST document which may contain many + operations and fragments and returns a collection of AST documents each of + which contains a single operation as well the fragment definitions it + refers to. + """ + + # Populate metadata and build a dependency graph. + visitor = SeparateOperations() + visit(document_ast, visitor) + operations = visitor.operations + fragments = visitor.fragments + positions = visitor.positions + dep_graph = visitor.dep_graph + + # For each operation, produce a new synthesized AST which includes only + # what is necessary for completing that operation. + separated_document_asts = {} + for operation in operations: + operation_name = op_name(operation) + dependencies: Set[str] = set() + collect_transitive_dependencies( + dependencies, dep_graph, operation_name) + + # The list of definition nodes to be included for this operation, + # sorted to retain the same order as the original document. 
+ definitions: List[ExecutableDefinitionNode] = [operation] + for name in dependencies: + definitions.append(fragments[name]) + definitions.sort(key=lambda n: positions.get(n, 0)) + + separated_document_asts[operation_name] = DocumentNode( + definitions=definitions) + + return separated_document_asts + + +class SeparateOperations(Visitor): + + def __init__(self): + super().__init__() + self.operations: List[OperationDefinitionNode] = [] + self.fragments: Dict[str, FragmentDefinitionNode] = {} + self.positions: Dict[ExecutableDefinitionNode, int] = {} + self.dep_graph: DepGraph = defaultdict(set) + self.from_name: str = None + self.idx = 0 + + def enter_operation_definition(self, node, *_args): + self.from_name = op_name(node) + self.operations.append(node) + self.positions[node] = self.idx + self.idx += 1 + + def enter_fragment_definition(self, node, *_args): + self.from_name = node.name.value + self.fragments[self.from_name] = node + self.positions[node] = self.idx + self.idx += 1 + + def enter_fragment_spread(self, node, *_args): + to_name = node.name.value + self.dep_graph[self.from_name].add(to_name) + + +def op_name(operation: OperationDefinitionNode) -> str: + """Provide the empty string for anonymous operations.""" + return operation.name.value if operation.name else '' + + +def collect_transitive_dependencies( + collected: Set[str], dep_graph: DepGraph, + from_name: str) -> None: + """Collect transitive dependencies. + + From a dependency graph, collects a list of transitive dependencies by + recursing through a dependency graph. + """ + immediate_deps = dep_graph[from_name] + for to_name in immediate_deps: + if to_name not in collected: + collected.add(to_name) + collect_transitive_dependencies(collected, dep_graph, to_name) diff --git a/graphql/utilities/type_comparators.py b/graphql/utilities/type_comparators.py new file mode 100644 index 00000000..e4d626d3 --- /dev/null +++ b/graphql/utilities/type_comparators.py @@ -0,0 +1,112 @@ +from typing import cast + +from ..type import ( + GraphQLAbstractType, GraphQLList, GraphQLNonNull, GraphQLObjectType, + GraphQLSchema, GraphQLType, + is_abstract_type, is_list_type, is_non_null_type, is_object_type) + +__all__ = ['is_equal_type', 'is_type_sub_type_of', 'do_types_overlap'] + + +def is_equal_type(type_a: GraphQLType, type_b: GraphQLType): + """Check whether two types are equal. + + Provided two types, return true if the types are equal (invariant).""" + # Equivalent types are equal. + if type_a is type_b: + return True + + # If either type is non-null, the other must also be non-null. + if is_non_null_type(type_a) and is_non_null_type(type_b): + # noinspection PyUnresolvedReferences + return is_equal_type(type_a.of_type, type_b.of_type) # type:ignore + + # If either type is a list, the other must also be a list. + if is_list_type(type_a) and is_list_type(type_b): + # noinspection PyUnresolvedReferences + return is_equal_type(type_a.of_type, type_b.of_type) # type:ignore + + # Otherwise the types are not equal. + return False + + +# noinspection PyUnresolvedReferences +def is_type_sub_type_of( + schema: GraphQLSchema, + maybe_subtype: GraphQLType, super_type: GraphQLType) -> bool: + """Check whether a type is subtype of another type in a given schema. + + Provided a type and a super type, return true if the first type is either + equal or a subset of the second super type (covariant). 
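A sketch of separate_operations splitting a two-operation document; the fragment names and fields are arbitrary:

    from graphql.language import parse
    from graphql.utilities.separate_operations import separate_operations

    document = parse('''
        query Foo { ...A }
        query Bar { ...B }
        fragment A on Query { a }
        fragment B on Query { b }
    ''')

    for name, doc in separate_operations(document).items():
        print(name, [definition.name.value for definition in doc.definitions])
    # Foo ['Foo', 'A']
    # Bar ['Bar', 'B']
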
+ """ + # Equivalent type is a valid subtype + if maybe_subtype is super_type: + return True + + # If super_type is non-null, maybe_subtype must also be non-null. + if is_non_null_type(super_type): + if is_non_null_type(maybe_subtype): + return is_type_sub_type_of( + schema, cast(GraphQLNonNull, maybe_subtype).of_type, + cast(GraphQLNonNull, super_type).of_type) + return False + elif is_non_null_type(maybe_subtype): + # If super_type is nullable, maybe_subtype may be non-null or nullable. + return is_type_sub_type_of( + schema, cast(GraphQLNonNull, maybe_subtype).of_type, super_type) + + # If superType type is a list, maybeSubType type must also be a list. + if is_list_type(super_type): + if is_list_type(maybe_subtype): + return is_type_sub_type_of( + schema, cast(GraphQLList, maybe_subtype).of_type, + cast(GraphQLList, super_type).of_type) + return False + elif is_list_type(maybe_subtype): + # If super_type is not a list, maybe_subtype must also be not a list. + return False + + # If super_type type is an abstract type, maybe_subtype type may be a + # currently possible object type. + # noinspection PyTypeChecker + if (is_abstract_type(super_type) and + is_object_type(maybe_subtype) and + schema.is_possible_type( + cast(GraphQLAbstractType, super_type), + cast(GraphQLObjectType, maybe_subtype))): + return True + + # Otherwise, the child type is not a valid subtype of the parent type. + return False + + +def do_types_overlap(schema, type_a, type_b): + """Check whether two types overlap in a given schema. + + Provided two composite types, determine if they "overlap". Two composite + types overlap when the Sets of possible concrete types for each intersect. + + This is often used to determine if a fragment of a given type could + possibly be visited in a context of another type. + + This function is commutative. + """ + # Equivalent types overlap + if type_a is type_b: + return True + + if is_abstract_type(type_a): + if is_abstract_type(type_b): + # If both types are abstract, then determine if there is any + # intersection between possible concrete types of each. + return any(schema.is_possible_type(type_b, type_) + for type_ in schema.get_possible_types(type_a)) + # Determine if latter type is a possible concrete type of the former. + return schema.is_possible_type(type_a, type_b) + + if is_abstract_type(type_b): + # Determine if former type is a possible concrete type of the latter. + return schema.is_possible_type(type_b, type_a) + + # Otherwise the types do not overlap. + return False diff --git a/graphql/utilities/type_from_ast.py b/graphql/utilities/type_from_ast.py new file mode 100644 index 00000000..6be29c4e --- /dev/null +++ b/graphql/utilities/type_from_ast.py @@ -0,0 +1,52 @@ +from typing import Optional, overload + +from ..language import ( + TypeNode, NamedTypeNode, ListTypeNode, NonNullTypeNode) +from ..type import ( + GraphQLType, GraphQLSchema, GraphQLNamedType, GraphQLList, GraphQLNonNull) + +__all__ = ['type_from_ast'] + + +@overload +def type_from_ast(schema: GraphQLSchema, + type_node: NamedTypeNode) -> Optional[GraphQLNamedType]: + ... + + +@overload # noqa: F811 (pycqa/flake8#423) +def type_from_ast(schema: GraphQLSchema, + type_node: ListTypeNode) -> Optional[GraphQLList]: + ... + + +@overload # noqa: F811 +def type_from_ast(schema: GraphQLSchema, + type_node: NonNullTypeNode) -> Optional[GraphQLNonNull]: + ... + + +@overload # noqa: F811 +def type_from_ast(schema: GraphQLSchema, + type_node: TypeNode) -> Optional[GraphQLType]: + ... 
+ + +def type_from_ast(schema, type_node): # noqa: F811 + """Get the GraphQL type definition from an AST node. + + Given a Schema and an AST node describing a type, return a GraphQLType + definition which applies to that type. For example, if provided the parsed + AST node for `[User]`, a GraphQLList instance will be returned, containing + the type called "User" found in the schema. If a type called "User" is not + found in the schema, then None will be returned. + """ + if isinstance(type_node, ListTypeNode): + inner_type = type_from_ast(schema, type_node.type) + return GraphQLList(inner_type) if inner_type else None + if isinstance(type_node, NonNullTypeNode): + inner_type = type_from_ast(schema, type_node.type) + return GraphQLNonNull(inner_type) if inner_type else None + if isinstance(type_node, NamedTypeNode): + return schema.get_type(type_node.name.value) + raise TypeError(f'Unexpected type kind: {type_node.kind}') diff --git a/graphql/utilities/type_info.py b/graphql/utilities/type_info.py new file mode 100644 index 00000000..057832ae --- /dev/null +++ b/graphql/utilities/type_info.py @@ -0,0 +1,247 @@ +from typing import Any, Callable, List, Optional, Union, cast + +from ..error import INVALID +from ..language import FieldNode, OperationType +from ..type import ( + GraphQLArgument, GraphQLCompositeType, GraphQLDirective, + GraphQLEnumValue, GraphQLField, GraphQLInputType, GraphQLInterfaceType, + GraphQLObjectType, GraphQLOutputType, GraphQLSchema, GraphQLType, + is_composite_type, is_input_type, is_output_type, get_named_type, + SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, is_object_type, + is_interface_type, get_nullable_type, is_list_type, is_input_object_type, + is_enum_type) +from ..utilities import type_from_ast + +__all__ = ['TypeInfo'] + + +GetFieldDefType = Callable[ + [GraphQLSchema, GraphQLType, FieldNode], Optional[GraphQLField]] + + +class TypeInfo: + """Utility class for keeping track of type definitions. + + TypeInfo is a utility class which, given a GraphQL schema, + can keep track of the current field and type definitions at any point + in a GraphQL document AST during a recursive descent by calling + `enter(node)` and `leave(node)`. + """ + + def __init__(self, schema: GraphQLSchema, + get_field_def_fn: GetFieldDefType=None, + initial_type: GraphQLType=None) -> None: + """Initialize the TypeInfo for the given GraphQL schema. + + The experimental optional second parameter is only needed in order to + support non-spec-compliant codebases. You should never need to use it. + It may disappear in the future. + + Initial type may be provided in rare cases to facilitate traversals + beginning somewhere other than documents. 
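A sketch of type_from_ast; it assumes parse_type from graphql.language.parser for building the type AST nodes:

    from graphql.language import parse
    from graphql.language.parser import parse_type
    from graphql.utilities.build_ast_schema import build_ast_schema
    from graphql.utilities.type_from_ast import type_from_ast

    schema = build_ast_schema(parse('type Query { hello: String }'))
    print(type_from_ast(schema, parse_type('[String!]')))   # [String!]
    print(type_from_ast(schema, parse_type('Missing')))     # None
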
+ """ + self._schema = schema + self._type_stack: List[Optional[GraphQLOutputType]] = [] + self._parent_type_stack: List[Optional[GraphQLCompositeType]] = [] + self._input_type_stack: List[Optional[GraphQLInputType]] = [] + self._field_def_stack: List[Optional[GraphQLField]] = [] + self._default_value_stack: List[Any] = [] + self._directive: Optional[GraphQLDirective] = None + self._argument: Optional[GraphQLArgument] = None + self._enum_value: Optional[GraphQLEnumValue] = None + self._get_field_def = get_field_def_fn or get_field_def + if initial_type: + if is_input_type(initial_type): + self._input_type_stack.append( + cast(GraphQLInputType, initial_type)) + if is_composite_type(initial_type): + self._parent_type_stack.append( + cast(GraphQLCompositeType, initial_type)) + if is_output_type(initial_type): + self._type_stack.append(cast(GraphQLOutputType, initial_type)) + + def get_type(self): + if self._type_stack: + return self._type_stack[-1] + + def get_parent_type(self): + if self._parent_type_stack: + return self._parent_type_stack[-1] + + def get_input_type(self): + if self._input_type_stack: + return self._input_type_stack[-1] + + def get_parent_input_type(self): + if len(self._input_type_stack) > 1: + return self._input_type_stack[-2] + + def get_field_def(self): + if self._field_def_stack: + return self._field_def_stack[-1] + + def get_default_value(self): + if self._default_value_stack: + return self._default_value_stack[-1] + + def get_directive(self): + return self._directive + + def get_argument(self): + return self._argument + + def get_enum_value(self): + return self._enum_value + + def enter(self, node): + method = getattr(self, 'enter_' + node.kind, None) + if method: + return method(node) + + def leave(self, node): + method = getattr(self, 'leave_' + node.kind, None) + if method: + return method() + + # noinspection PyUnusedLocal + def enter_selection_set(self, node): + named_type = get_named_type(self.get_type()) + self._parent_type_stack.append( + named_type if is_composite_type(named_type) else None) + + def enter_field(self, node): + parent_type = self.get_parent_type() + if parent_type: + field_def = self._get_field_def(self._schema, parent_type, node) + field_type = field_def.type if field_def else None + else: + field_def = field_type = None + self._field_def_stack.append(field_def) + self._type_stack.append( + field_type if is_output_type(field_type) else None) + + def enter_directive(self, node): + self._directive = self._schema.get_directive(node.name.value) + + def enter_operation_definition(self, node): + if node.operation == OperationType.QUERY: + type_ = self._schema.query_type + elif node.operation == OperationType.MUTATION: + type_ = self._schema.mutation_type + elif node.operation == OperationType.SUBSCRIPTION: + type_ = self._schema.subscription_type + else: + type_ = None + self._type_stack.append(type_ if is_object_type(type_) else None) + + def enter_inline_fragment(self, node): + type_condition_ast = node.type_condition + output_type = type_from_ast( + self._schema, type_condition_ast + ) if type_condition_ast else get_named_type(self.get_type()) + self._type_stack.append( + output_type if is_output_type(output_type) else None) + + enter_fragment_definition = enter_inline_fragment + + def enter_variable_definition(self, node): + input_type = type_from_ast(self._schema, node.type) + self._input_type_stack.append( + input_type if is_input_type(input_type) else None) + + def enter_argument(self, node): + field_or_directive = self.get_directive() or 
self.get_field_def() + if field_or_directive: + arg_def = field_or_directive.args.get(node.name.value) + arg_type = arg_def.type if arg_def else None + else: + arg_def = arg_type = None + self._argument = arg_def + self._default_value_stack.append( + arg_def.default_value if arg_def else INVALID) + self._input_type_stack.append( + arg_type if is_input_type(arg_type) else None) + + # noinspection PyUnusedLocal + def enter_list_value(self, node): + list_type = get_nullable_type(self.get_input_type()) + item_type = list_type.of_type if is_list_type(list_type) else list_type + # List positions never have a default value. + self._default_value_stack.append(INVALID) + self._input_type_stack.append( + item_type if is_input_type(item_type) else None) + + def enter_object_field(self, node): + object_type = get_named_type(self.get_input_type()) + if is_input_object_type(object_type): + input_field = object_type.fields.get(node.name.value) + input_field_type = input_field.type if input_field else None + else: + input_field = input_field_type = None + self._default_value_stack.append( + input_field.default_value if input_field else INVALID) + self._input_type_stack.append( + input_field_type if is_input_type(input_field_type) else None) + + def enter_enum_value(self, node): + enum_type = get_named_type(self.get_input_type()) + if is_enum_type(enum_type): + enum_value = enum_type.values.get(node.value) + else: + enum_value = None + self._enum_value = enum_value + + def leave_selection_set(self): + del self._parent_type_stack[-1:] + + def leave_field(self): + del self._field_def_stack[-1:] + del self._type_stack[-1:] + + def leave_directive(self): + self._directive = None + + def leave_operation_definition(self): + del self._type_stack[-1:] + + leave_inline_fragment = leave_operation_definition + leave_fragment_definition = leave_operation_definition + + def leave_variable_definition(self): + del self._input_type_stack[-1:] + + def leave_argument(self): + self._argument = None + del self._default_value_stack[-1:] + del self._input_type_stack[-1:] + + def leave_list_value(self): + del self._default_value_stack[-1:] + del self._input_type_stack[-1:] + + leave_object_field = leave_list_value + + def leave_enum(self): + self._enum_value = None + + +def get_field_def(schema: GraphQLSchema, parent_type: GraphQLType, + field_node: FieldNode) -> Optional[GraphQLField]: + """Get field definition. + + Not exactly the same as the executor's definition of getFieldDef, in this + statically evaluated environment we do not always have an Object type, + and need to handle Interface and Union types. 
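A sketch of TypeInfo driving a traversal; it assumes TypeInfoVisitor wraps a visitor and keeps the TypeInfo in sync while visiting, as in find_deprecated_usages above:

    from graphql.language import TypeInfoVisitor, Visitor, parse, visit
    from graphql.utilities.build_ast_schema import build_ast_schema
    from graphql.utilities.type_info import TypeInfo

    schema = build_ast_schema(parse('type Query { hello: String }'))
    type_info = TypeInfo(schema)

    class FieldTypeLogger(Visitor):
        """Print the resolved output type of every field in a document."""

        def enter_field(self, node, *_args):
            print(node.name.value, '->', type_info.get_type())

    visit(parse('{ hello }'), TypeInfoVisitor(type_info, FieldTypeLogger()))
    # hello -> String
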
+ """ + name = field_node.name.value + if name == '__schema' and schema.query_type is parent_type: + return SchemaMetaFieldDef + if name == '__type' and schema.query_type is parent_type: + return TypeMetaFieldDef + if name == '__typename' and is_composite_type(parent_type): + return TypeNameMetaFieldDef + if is_object_type(parent_type) or is_interface_type(parent_type): + parent_type = cast( + Union[GraphQLObjectType, GraphQLInterfaceType], parent_type) + return parent_type.fields.get(name) + return None diff --git a/graphql/utilities/value_from_ast.py b/graphql/utilities/value_from_ast.py new file mode 100644 index 00000000..df74b824 --- /dev/null +++ b/graphql/utilities/value_from_ast.py @@ -0,0 +1,146 @@ +from typing import Any, Dict, List, Optional, cast + +from ..error import INVALID +from ..language import ( + EnumValueNode, ListValueNode, NullValueNode, + ObjectValueNode, ValueNode, VariableNode) +from ..pyutils import is_invalid +from ..type import ( + GraphQLEnumType, GraphQLInputObjectType, GraphQLInputType, GraphQLList, + GraphQLNonNull, GraphQLScalarType, is_enum_type, is_input_object_type, + is_list_type, is_non_null_type, is_scalar_type) + +__all__ = ['value_from_ast'] + + +def value_from_ast( + value_node: Optional[ValueNode], type_: GraphQLInputType, + variables: Dict[str, Any]=None) -> Any: + """Produce a Python value given a GraphQL Value AST. + + A GraphQL type must be provided, which will be used to interpret different + GraphQL Value literals. + + Returns `INVALID` when the value could not be validly coerced according + to the provided type. + + | GraphQL Value | JSON Value | Python Value | + | -------------------- | ------------- | ------------ | + | Input Object | Object | dict | + | List | Array | list | + | Boolean | Boolean | bool | + | String | String | str | + | Int / Float | Number | int / float | + | Enum Value | Mixed | Any | + | NullValue | null | None | + + """ + if not value_node: + # When there is no node, then there is also no value. + # Importantly, this is different from returning the value null. + return INVALID + + if is_non_null_type(type_): + if isinstance(value_node, NullValueNode): + return INVALID + type_ = cast(GraphQLNonNull, type_) + return value_from_ast(value_node, type_.of_type, variables) + + if isinstance(value_node, NullValueNode): + return None # This is explicitly returning the value None. + + if isinstance(value_node, VariableNode): + variable_name = value_node.name.value + if not variables: + return INVALID + variable_value = variables.get(variable_name, INVALID) + if is_invalid(variable_value): + return INVALID + if variable_value is None and is_non_null_type(type_): + return INVALID + # Note: This does no further checking that this variable is correct. + # This assumes that this query has been validated and the variable + # usage here is of the correct type. + return variable_value + + if is_list_type(type_): + type_ = cast(GraphQLList, type_) + item_type = type_.of_type + if isinstance(value_node, ListValueNode): + coerced_values: List[Any] = [] + append_value = coerced_values.append + for item_node in value_node.values: + if is_missing_variable(item_node, variables): + # If an array contains a missing variable, it is either + # coerced to None or if the item type is non-null, it + # is considered invalid. 
+ if is_non_null_type(item_type): + return INVALID + append_value(None) + else: + item_value = value_from_ast( + item_node, item_type, variables) + if is_invalid(item_value): + return INVALID + append_value(item_value) + return coerced_values + coerced_value = value_from_ast(value_node, item_type, variables) + if is_invalid(coerced_value): + return INVALID + return [coerced_value] + + if is_input_object_type(type_): + if not isinstance(value_node, ObjectValueNode): + return INVALID + type_ = cast(GraphQLInputObjectType, type_) + coerced_obj: Dict[str, Any] = {} + fields = type_.fields + field_nodes = {field.name.value: field for field in value_node.fields} + for field_name, field in fields.items(): + field_node = field_nodes.get(field_name) + if not field_node or is_missing_variable( + field_node.value, variables): + if field.default_value is not INVALID: + coerced_obj[field_name] = field.default_value + elif is_non_null_type(field.type): + return INVALID + continue + field_value = value_from_ast( + field_node.value, field.type, variables) + if is_invalid(field_value): + return INVALID + coerced_obj[field_name] = field_value + return coerced_obj + + if is_enum_type(type_): + if not isinstance(value_node, EnumValueNode): + return INVALID + type_ = cast(GraphQLEnumType, type_) + enum_value = type_.values.get(value_node.value) + if not enum_value: + return INVALID + return enum_value.value + + if is_scalar_type(type_): + # Scalars fulfill parsing a literal value via parse_literal(). + # Invalid values represent a failure to parse correctly, in which case + # INVALID is returned. + type_ = cast(GraphQLScalarType, type_) + try: + if variables: + result = type_.parse_literal(value_node, variables) + else: + result = type_.parse_literal(value_node) + except (ArithmeticError, TypeError, ValueError): + return INVALID + if is_invalid(result): + return INVALID + return result + + +def is_missing_variable( + value_node: ValueNode, variables: Dict[str, Any]=None) -> bool: + """Check if value_node is a variable not defined in the variables dict.""" + return isinstance(value_node, VariableNode) and ( + not variables or + is_invalid(variables.get(value_node.name.value, INVALID))) diff --git a/graphql/utilities/value_from_ast_untyped.py b/graphql/utilities/value_from_ast_untyped.py new file mode 100644 index 00000000..e7cfd911 --- /dev/null +++ b/graphql/utilities/value_from_ast_untyped.py @@ -0,0 +1,84 @@ +from typing import Any, Dict + +from ..error import INVALID +from ..language import ValueNode +from ..pyutils import is_invalid + +__all__ = ['value_from_ast_untyped'] + + +def value_from_ast_untyped( + value_node: ValueNode, variables: Dict[str, Any]=None) -> Any: + """Produce a Python value given a GraphQL Value AST. + + Unlike `value_from_ast()`, no type is provided. The resulting Python + value will reflect the provided GraphQL value AST. 
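A few sketches of value_from_ast; parse_value from graphql.language.parser is assumed for producing the value AST nodes:

    from graphql.language.parser import parse_value
    from graphql.type import GraphQLInt, GraphQLList, GraphQLNonNull
    from graphql.utilities.value_from_ast import value_from_ast

    value_from_ast(parse_value('[1, 2, 3]'), GraphQLList(GraphQLInt))
    # [1, 2, 3]
    value_from_ast(parse_value('$count'), GraphQLNonNull(GraphQLInt), {'count': 42})
    # 42
    value_from_ast(parse_value('null'), GraphQLNonNull(GraphQLInt))
    # INVALID (null cannot be coerced to a non-null type)
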
+ + | GraphQL Value | JSON Value | Python Value | + | -------------------- | ---------- | ------------ | + | Input Object | Object | dict | + | List | Array | list | + | Boolean | Boolean | bool | + | String / Enum | String | str | + | Int / Float | Number | int / float | + | Null | null | None | + + """ + func = _value_from_kind_functions.get(value_node.kind) + if func: + return func(value_node, variables) + raise TypeError(f'Unexpected value kind: {value_node.kind}') + + +def value_from_null(_value_node, _variables): + return None + + +def value_from_int(value_node, _variables): + try: + return int(value_node.value) + except ValueError: + return INVALID + + +def value_from_float(value_node, _variables): + try: + return float(value_node.value) + except ValueError: + return INVALID + + +def value_from_string(value_node, _variables): + return value_node.value + + +def value_from_list(value_node, variables): + return [value_from_ast_untyped(node, variables) + for node in value_node.values] + + +def value_from_object(value_node, variables): + return {field.name.value: value_from_ast_untyped(field.value, variables) + for field in value_node.fields} + + +def value_from_variable(value_node, variables): + variable_name = value_node.name.value + if not variables: + return INVALID + value = variables.get(variable_name, INVALID) + if is_invalid(value): + return INVALID + return value + + +_value_from_kind_functions = { + 'null_value': value_from_null, + 'int_value': value_from_int, + 'float_value': value_from_float, + 'string_value': value_from_string, + 'enum_value': value_from_string, + 'boolean_value': value_from_string, + 'list_value': value_from_list, + 'object_value': value_from_object, + 'variable': value_from_variable} diff --git a/graphql/validation/__init__.py b/graphql/validation/__init__.py new file mode 100644 index 00000000..567d16ca --- /dev/null +++ b/graphql/validation/__init__.py @@ -0,0 +1,107 @@ +"""GraphQL Validation + +The `graphql.validation` package fulfills the Validation phase of fulfilling +a GraphQL result. 
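A sketch of the untyped variant, again assuming parse_value for the literal:

    from graphql.language.parser import parse_value
    from graphql.utilities.value_from_ast_untyped import value_from_ast_untyped

    value_from_ast_untyped(parse_value('{ flag: true, count: 3, tags: ["a", "b"] }'))
    # {'flag': True, 'count': 3, 'tags': ['a', 'b']}
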
+""" + +from .validate import validate + +from .validation_context import ValidationContext + +from .specified_rules import specified_rules + +# Spec Section: "Executable Definitions" +from .rules.executable_definitions import ExecutableDefinitionsRule + +# Spec Section: "Field Selections on Objects, Interfaces, and Unions Types" +from .rules.fields_on_correct_type import FieldsOnCorrectTypeRule + +# Spec Section: "Fragments on Composite Types" +from .rules.fragments_on_composite_types import FragmentsOnCompositeTypesRule + +# Spec Section: "Argument Names" +from .rules.known_argument_names import KnownArgumentNamesRule + +# Spec Section: "Directives Are Defined" +from .rules.known_directives import KnownDirectivesRule + +# Spec Section: "Fragment spread target defined" +from .rules.known_fragment_names import KnownFragmentNamesRule + +# Spec Section: "Fragment Spread Type Existence" +from .rules.known_type_names import KnownTypeNamesRule + +# Spec Section: "Lone Anonymous Operation" +from .rules.lone_anonymous_operation import LoneAnonymousOperationRule + +# Spec Section: "Fragments must not form cycles" +from .rules.no_fragment_cycles import NoFragmentCyclesRule + +# Spec Section: "All Variable Used Defined" +from .rules.no_undefined_variables import NoUndefinedVariablesRule + +# Spec Section: "Fragments must be used" +from .rules.no_unused_fragments import NoUnusedFragmentsRule + +# Spec Section: "All Variables Used" +from .rules.no_unused_variables import NoUnusedVariablesRule + +# Spec Section: "Field Selection Merging" +from .rules.overlapping_fields_can_be_merged import ( + OverlappingFieldsCanBeMergedRule) + +# Spec Section: "Fragment spread is possible" +from .rules.possible_fragment_spreads import PossibleFragmentSpreadsRule + +# Spec Section: "Argument Optionality" +from .rules.provided_required_arguments import ProvidedRequiredArgumentsRule + +# Spec Section: "Leaf Field Selections" +from .rules.scalar_leafs import ScalarLeafsRule + +# Spec Section: "Subscriptions with Single Root Field" +from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule + +# Spec Section: "Argument Uniqueness" +from .rules.unique_argument_names import UniqueArgumentNamesRule + +# Spec Section: "Directives Are Unique Per Location" +from .rules.unique_directives_per_location import ( + UniqueDirectivesPerLocationRule) + +# Spec Section: "Fragment Name Uniqueness" +from .rules.unique_fragment_names import UniqueFragmentNamesRule + +# Spec Section: "Input Object Field Uniqueness" +from .rules.unique_input_field_names import UniqueInputFieldNamesRule + +# Spec Section: "Operation Name Uniqueness" +from .rules.unique_operation_names import UniqueOperationNamesRule + +# Spec Section: "Variable Uniqueness" +from .rules.unique_variable_names import UniqueVariableNamesRule + +# Spec Section: "Value Type Correctness" +from .rules.values_of_correct_type import ValuesOfCorrectTypeRule + +# Spec Section: "Variables are Input Types" +from .rules.variables_are_input_types import VariablesAreInputTypesRule + +# Spec Section: "All Variable Usages Are Allowed" +from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule + +__all__ = [ + 'validate', 'ValidationContext', 'specified_rules', + 'ExecutableDefinitionsRule', 'FieldsOnCorrectTypeRule', + 'FragmentsOnCompositeTypesRule', 'KnownArgumentNamesRule', + 'KnownDirectivesRule', 'KnownFragmentNamesRule', 'KnownTypeNamesRule', + 'LoneAnonymousOperationRule', 'NoFragmentCyclesRule', + 'NoUndefinedVariablesRule', 'NoUnusedFragmentsRule', 
+ 'NoUnusedVariablesRule', 'OverlappingFieldsCanBeMergedRule', + 'PossibleFragmentSpreadsRule', 'ProvidedRequiredArgumentsRule', + 'ScalarLeafsRule', 'SingleFieldSubscriptionsRule', + 'UniqueArgumentNamesRule', 'UniqueDirectivesPerLocationRule', + 'UniqueFragmentNamesRule', 'UniqueInputFieldNamesRule', + 'UniqueOperationNamesRule', 'UniqueVariableNamesRule', + 'ValuesOfCorrectTypeRule', 'VariablesAreInputTypesRule', + 'VariablesInAllowedPositionRule'] diff --git a/graphql/validation/rules/__init__.py b/graphql/validation/rules/__init__.py new file mode 100644 index 00000000..74f4acc5 --- /dev/null +++ b/graphql/validation/rules/__init__.py @@ -0,0 +1,16 @@ +"""graphql.validation.rules package""" + +from ...error import GraphQLError +from ...language.visitor import Visitor +from ..validation_context import ValidationContext + +__all__ = ['ValidationRule'] + + +class ValidationRule(Visitor): + + def __init__(self, context: ValidationContext) -> None: + self.context = context + + def report_error(self, error: GraphQLError): + self.context.report_error(error) diff --git a/graphql/validation/rules/executable_definitions.py b/graphql/validation/rules/executable_definitions.py new file mode 100644 index 00000000..a60bbc12 --- /dev/null +++ b/graphql/validation/rules/executable_definitions.py @@ -0,0 +1,30 @@ +from ...error import GraphQLError +from ...language import ( + FragmentDefinitionNode, OperationDefinitionNode, + SchemaDefinitionNode, SchemaExtensionNode) +from . import ValidationRule + +__all__ = ['ExecutableDefinitionsRule', 'non_executable_definitions_message'] + + +def non_executable_definitions_message(def_name: str) -> str: + return f'The {def_name} definition is not executable.' + + +class ExecutableDefinitionsRule(ValidationRule): + """Executable definitions + + A GraphQL document is only valid for execution if all definitions are + either operation or fragment definitions. + """ + + def enter_document(self, node, *_args): + for definition in node.definitions: + if not isinstance(definition, ( + OperationDefinitionNode, FragmentDefinitionNode)): + self.report_error(GraphQLError( + non_executable_definitions_message( + 'schema' if isinstance(definition, ( + SchemaDefinitionNode, SchemaExtensionNode)) + else definition.name.value), [definition])) + return self.SKIP diff --git a/graphql/validation/rules/fields_on_correct_type.py b/graphql/validation/rules/fields_on_correct_type.py new file mode 100644 index 00000000..087b160c --- /dev/null +++ b/graphql/validation/rules/fields_on_correct_type.py @@ -0,0 +1,107 @@ +from collections import defaultdict +from typing import Dict, List, cast + +from ...type import ( + GraphQLAbstractType, GraphQLSchema, GraphQLOutputType, + is_abstract_type, is_interface_type, is_object_type) +from ...error import GraphQLError +from ...pyutils import quoted_or_list, suggestion_list +from . import ValidationRule + +__all__ = ['FieldsOnCorrectTypeRule', 'undefined_field_message'] + + +def undefined_field_message( + field_name: str, type_: str, + suggested_type_names: List[str], + suggested_field_names: List[str]) -> str: + message = f"Cannot query field '{field_name}' on type '{type_}'." + if suggested_type_names: + suggestions = quoted_or_list(suggested_type_names) + message += f' Did you mean to use an inline fragment on {suggestions}?' + elif suggested_field_names: + suggestions = quoted_or_list(suggested_field_names) + message += f' Did you mean {suggestions}?' 
+ return message + + +class FieldsOnCorrectTypeRule(ValidationRule): + """Fields on correct type + + A GraphQL document is only valid if all fields selected are defined by the + parent type, or are an allowed meta field such as __typename. + """ + + def enter_field(self, node, *_args): + type_ = self.context.get_parent_type() + if not type_: + return + field_def = self.context.get_field_def() + if field_def: + return + # This field doesn't exist, lets look for suggestions. + schema = self.context.schema + field_name = node.name.value + # First determine if there are any suggested types to condition on. + suggested_type_names = get_suggested_type_names( + schema, type_, field_name) + # If there are no suggested types, then perhaps this was a typo? + suggested_field_names = ( + [] if suggested_type_names + else get_suggested_field_names(type_, field_name)) + + # Report an error, including helpful suggestions. + self.report_error(GraphQLError(undefined_field_message( + field_name, type_.name, + suggested_type_names, suggested_field_names), [node])) + + +def get_suggested_type_names( + schema: GraphQLSchema, type_: GraphQLOutputType, + field_name: str) -> List[str]: + """ + Get a list of suggested type names. + + Go through all of the implementations of type, as well as the interfaces + that they implement. If any of those types include the provided field, + suggest them, sorted by how often the type is referenced, starting + with Interfaces. + """ + if is_abstract_type(type_): + type_ = cast(GraphQLAbstractType, type_) + suggested_object_types = [] + interface_usage_count: Dict[str, int] = defaultdict(int) + for possible_type in schema.get_possible_types(type_): + if field_name not in possible_type.fields: + continue + # This object type defines this field. + suggested_object_types.append(possible_type.name) + for possible_interface in possible_type.interfaces: + if field_name not in possible_interface.fields: + continue + # This interface type defines this field. + interface_usage_count[possible_interface.name] += 1 + + # Suggest interface types based on how common they are. + suggested_interface_types = sorted( + interface_usage_count, key=lambda k: -interface_usage_count[k]) + + # Suggest both interface and object types. + return suggested_interface_types + suggested_object_types + + # Otherwise, must be an Object type, which does not have possible fields. + return [] + + +def get_suggested_field_names( + type_: GraphQLOutputType, field_name: str) -> List[str]: + """Get a list of suggested field names. + + For the field name provided, determine if there are any similar field names + that may be the result of a typo. + """ + if is_object_type(type_) or is_interface_type(type_): + possible_field_names = list(type_.fields) # type: ignore + return suggestion_list(field_name, possible_field_names) + # Otherwise, must be a Union type, which does not define fields. + return [] diff --git a/graphql/validation/rules/fragments_on_composite_types.py b/graphql/validation/rules/fragments_on_composite_types.py new file mode 100644 index 00000000..e5387232 --- /dev/null +++ b/graphql/validation/rules/fragments_on_composite_types.py @@ -0,0 +1,48 @@ +from ...error import GraphQLError +from ...language.printer import print_ast +from ...type import is_composite_type +from ...utilities import type_from_ast +from . 
import ValidationRule + +__all__ = [ + 'FragmentsOnCompositeTypesRule', + 'inline_fragment_on_non_composite_error_message', + 'fragment_on_non_composite_error_message'] + + +def inline_fragment_on_non_composite_error_message( + type_: str) -> str: + return f"Fragment cannot condition on non composite type '{type_}'." + + +def fragment_on_non_composite_error_message( + frag_name: str, type_: str) -> str: + return (f"Fragment '{frag_name}'" + f" cannot condition on non composite type '{type_}'.") + + +class FragmentsOnCompositeTypesRule(ValidationRule): + """Fragments on composite type + + Fragments use a type condition to determine if they apply, since fragments + can only be spread into a composite type (object, interface, or union), the + type condition must also be a composite type. + """ + + def enter_inline_fragment(self, node, *_args): + type_condition = node.type_condition + if type_condition: + type_ = type_from_ast(self.context.schema, type_condition) + if type_ and not is_composite_type(type_): + self.report_error(GraphQLError( + inline_fragment_on_non_composite_error_message( + print_ast(type_condition)), [type_condition])) + + def enter_fragment_definition(self, node, *_args): + type_condition = node.type_condition + type_ = type_from_ast(self.context.schema, type_condition) + if type_ and not is_composite_type(type_): + self.report_error(GraphQLError( + fragment_on_non_composite_error_message( + node.name.value, print_ast(type_condition)), + [type_condition])) diff --git a/graphql/validation/rules/known_argument_names.py b/graphql/validation/rules/known_argument_names.py new file mode 100644 index 00000000..5e59accd --- /dev/null +++ b/graphql/validation/rules/known_argument_names.py @@ -0,0 +1,66 @@ +from typing import List + +from ...error import GraphQLError +from ...language import FieldNode, DirectiveNode +from ...pyutils import quoted_or_list, suggestion_list +from . import ValidationRule + +__all__ = [ + 'KnownArgumentNamesRule', + 'unknown_arg_message', 'unknown_directive_arg_message'] + + +def unknown_arg_message( + arg_name: str, field_name: str, type_name: str, + suggested_args: List[str]) -> str: + message = (f"Unknown argument '{arg_name}' on field '{field_name}'" + f" of type '{type_name}'.") + if suggested_args: + message += f' Did you mean {quoted_or_list(suggested_args)}?' + return message + + +def unknown_directive_arg_message( + arg_name: str, directive_name: str, + suggested_args: List[str]) -> str: + message = (f"Unknown argument '{arg_name}'" + f" on directive '@{directive_name}'.") + if suggested_args: + message += f' Did you mean {quoted_or_list(suggested_args)}?' + return message + + +class KnownArgumentNamesRule(ValidationRule): + """Known argument names + + A GraphQL field is only valid if all supplied arguments are defined by + that field. 
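For example, a sketch against a hypothetical one-field schema, reusing the `parse`, `build_schema` and `validate` helpers from this patch and passing the rule explicitly:

    from graphql.language import parse
    from graphql.utilities import build_schema
    from graphql.validation import KnownArgumentNamesRule, validate

    schema = build_schema('type Query { hello: String }')
    errors = validate(schema, parse('{ hello(name: "world") }'),
                      [KnownArgumentNamesRule])
    # errors[0].message == "Unknown argument 'name' on field 'hello' of type 'Query'."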
+ """ + + def enter_argument(self, node, _key, _parent, _path, ancestors): + context = self.context + arg_def = context.get_argument() + if not arg_def: + argument_of = ancestors[-1] + if isinstance(argument_of, FieldNode): + field_def = context.get_field_def() + parent_type = context.get_parent_type() + if field_def and parent_type: + context.report_error(GraphQLError( + unknown_arg_message( + node.name.value, + argument_of.name.value, + parent_type.name, + suggestion_list( + node.name.value, list(field_def.args))), + [node])) + elif isinstance(argument_of, DirectiveNode): + directive = context.get_directive() + if directive: + context.report_error(GraphQLError( + unknown_directive_arg_message( + node.name.value, + directive.name, + suggestion_list( + node.name.value, list(directive.args))), + [node])) diff --git a/graphql/validation/rules/known_directives.py b/graphql/validation/rules/known_directives.py new file mode 100644 index 00000000..2571deb1 --- /dev/null +++ b/graphql/validation/rules/known_directives.py @@ -0,0 +1,85 @@ +from typing import cast + +from ...error import GraphQLError +from ...language import DirectiveLocation, Node, OperationDefinitionNode +from . import ValidationRule + +__all__ = [ + 'KnownDirectivesRule', + 'unknown_directive_message', 'misplaced_directive_message'] + + +def unknown_directive_message(directive_name: str) -> str: + return f"Unknown directive '{directive_name}'." + + +def misplaced_directive_message(directive_name, location): + return f"Directive '{directive_name}' may not be used on {location}." + + +class KnownDirectivesRule(ValidationRule): + """Known directives + + A GraphQL document is only valid if all `@directives` are known by the + schema and legally positioned. + """ + + def enter_directive(self, node, _key, _parent, _path, ancestors): + for definition in self.context.schema.directives: + if definition.name == node.name.value: + candidate_location = get_directive_location_for_ast_path( + ancestors) + if (candidate_location + and candidate_location not in definition.locations): + self.report_error(GraphQLError( + misplaced_directive_message( + node.name.value, candidate_location.value), + [node])) + break + else: + self.report_error(GraphQLError( + unknown_directive_message(node.name.value), [node])) + + +_operation_location = { + 'query': DirectiveLocation.QUERY, + 'mutation': DirectiveLocation.MUTATION, + 'subscription': DirectiveLocation.SUBSCRIPTION} + +_directive_location = { + 'field': DirectiveLocation.FIELD, + 'fragment_spread': DirectiveLocation.FRAGMENT_SPREAD, + 'inline_fragment': DirectiveLocation.INLINE_FRAGMENT, + 'fragment_definition': DirectiveLocation.FRAGMENT_DEFINITION, + 'schema_definition': DirectiveLocation.SCHEMA, + 'schema_extension': DirectiveLocation.SCHEMA, + 'scalar_type_definition': DirectiveLocation.SCALAR, + 'scalar_type_extension': DirectiveLocation.SCALAR, + 'object_type_definition': DirectiveLocation.OBJECT, + 'object_type_extension': DirectiveLocation.OBJECT, + 'field_definition': DirectiveLocation.FIELD_DEFINITION, + 'interface_type_definition': DirectiveLocation.INTERFACE, + 'interface_type_extension': DirectiveLocation.INTERFACE, + 'union_type_definition': DirectiveLocation.UNION, + 'union_type_extension': DirectiveLocation.UNION, + 'enum_type_definition': DirectiveLocation.ENUM, + 'enum_type_extension': DirectiveLocation.ENUM, + 'enum_value_definition': DirectiveLocation.ENUM_VALUE, + 'input_object_type_definition': DirectiveLocation.INPUT_OBJECT, + 'input_object_type_extension': 
DirectiveLocation.INPUT_OBJECT}
+
+
+def get_directive_location_for_ast_path(ancestors):
+    applied_to = ancestors[-1]
+    if isinstance(applied_to, Node):
+        kind = applied_to.kind
+        if kind == 'operation_definition':
+            applied_to = cast(OperationDefinitionNode, applied_to)
+            return _operation_location.get(applied_to.operation.value)
+        elif kind == 'input_value_definition':
+            parent_node = ancestors[-3]
+            return (DirectiveLocation.INPUT_FIELD_DEFINITION
+                    if parent_node.kind == 'input_object_type_definition'
+                    else DirectiveLocation.ARGUMENT_DEFINITION)
+        else:
+            return _directive_location.get(kind)
diff --git a/graphql/validation/rules/known_fragment_names.py b/graphql/validation/rules/known_fragment_names.py
new file mode 100644
index 00000000..dae59021
--- /dev/null
+++ b/graphql/validation/rules/known_fragment_names.py
@@ -0,0 +1,23 @@
+from ...error import GraphQLError
+from . import ValidationRule
+
+__all__ = ['KnownFragmentNamesRule', 'unknown_fragment_message']
+
+
+def unknown_fragment_message(fragment_name):
+    return f"Unknown fragment '{fragment_name}'."
+
+
+class KnownFragmentNamesRule(ValidationRule):
+    """Known fragment names
+
+    A GraphQL document is only valid if all `...Fragment` fragment spreads
+    refer to fragments defined in the same document.
+    """
+
+    def enter_fragment_spread(self, node, *_args):
+        fragment_name = node.name.value
+        fragment = self.context.get_fragment(fragment_name)
+        if not fragment:
+            self.report_error(GraphQLError(
+                unknown_fragment_message(fragment_name), [node.name]))
diff --git a/graphql/validation/rules/known_type_names.py b/graphql/validation/rules/known_type_names.py
new file mode 100644
index 00000000..533c4254
--- /dev/null
+++ b/graphql/validation/rules/known_type_names.py
@@ -0,0 +1,43 @@
+from typing import List
+
+from ...error import GraphQLError
+from ...pyutils import quoted_or_list, suggestion_list
+from . import ValidationRule
+
+__all__ = ['KnownTypeNamesRule', 'unknown_type_message']
+
+
+def unknown_type_message(type_name: str, suggested_types: List[str]) -> str:
+    message = f"Unknown type '{type_name}'."
+    if suggested_types:
+        message += f' Perhaps you meant {quoted_or_list(suggested_types)}?'
+    return message
+
+
+class KnownTypeNamesRule(ValidationRule):
+    """Known type names
+
+    A GraphQL document is only valid if referenced types (specifically variable
+    definitions and fragment conditions) are defined by the type schema.
+    """
+
+    def enter_object_type_definition(self, *_args):
+        return self.SKIP
+
+    def enter_interface_type_definition(self, *_args):
+        return self.SKIP
+
+    def enter_union_type_definition(self, *_args):
+        return self.SKIP
+
+    def enter_input_object_type_definition(self, *_args):
+        return self.SKIP
+
+    def enter_named_type(self, node, *_args):
+        schema = self.context.schema
+        type_name = node.name.value
+        if not schema.get_type(type_name):
+            self.report_error(GraphQLError(
+                unknown_type_message(
+                    type_name, suggestion_list(
+                        type_name, list(schema.type_map))), [node]))
diff --git a/graphql/validation/rules/lone_anonymous_operation.py b/graphql/validation/rules/lone_anonymous_operation.py
new file mode 100644
index 00000000..8d35e1a2
--- /dev/null
+++ b/graphql/validation/rules/lone_anonymous_operation.py
@@ -0,0 +1,33 @@
+from ...language import OperationDefinitionNode
+from ...error import GraphQLError
+from . 
import ValidationRule + +__all__ = [ + 'LoneAnonymousOperationRule', 'anonymous_operation_not_alone_message'] + + +def anonymous_operation_not_alone_message() -> str: + return 'This anonymous operation must be the only defined operation.' + + +class LoneAnonymousOperationRule(ValidationRule): + """Lone anonymous operation + + A GraphQL document is only valid if when it contains an anonymous operation + (the query short-hand) that it contains only that one operation definition. + + """ + + def __init__(self, context): + super().__init__(context) + self.operation_count = 0 + + def enter_document(self, node, *_args): + self.operation_count = sum( + 1 for definition in node.definitions + if isinstance(definition, OperationDefinitionNode)) + + def enter_operation_definition(self, node, *_args): + if not node.name and self.operation_count > 1: + self.report_error(GraphQLError( + anonymous_operation_not_alone_message(), [node])) diff --git a/graphql/validation/rules/no_fragment_cycles.py b/graphql/validation/rules/no_fragment_cycles.py new file mode 100644 index 00000000..3ff1b82e --- /dev/null +++ b/graphql/validation/rules/no_fragment_cycles.py @@ -0,0 +1,74 @@ +from typing import List + +from ...language import FragmentDefinitionNode +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['NoFragmentCyclesRule', 'cycle_error_message'] + + +def cycle_error_message(frag_name: str, spread_names: List[str]) -> str: + via = f" via {', '.join(spread_names)}" if spread_names else '' + return f"Cannot spread fragment '{frag_name}' within itself{via}." + + +class NoFragmentCyclesRule(ValidationRule): + """No fragment cycles""" + + def __init__(self, context): + super().__init__(context) + self.errors = [] + # Tracks already visited fragments to maintain O(N) and to ensure that + # cycles are not redundantly reported. + self.visited_frags = set() + # List of AST nodes used to produce meaningful errors + self.spread_path = [] + # Position in the spread path + self.spread_path_index_by_name = {} + + def enter_operation_definition(self, *_args): + return self.SKIP + + def enter_fragment_definition(self, node, *_args): + self.detect_cycle_recursive(node) + return self.SKIP + + def detect_cycle_recursive(self, fragment: FragmentDefinitionNode): + # This does a straight-forward DFS to find cycles. + # It does not terminate when a cycle was found but continues to explore + # the graph to find all possible cycles. 
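        # For instance, given "fragment A on T { ...B }" and
        # "fragment B on T { ...A }" (T being any hypothetical type),
        # visiting A reports, per cycle_error_message() above:
        #     Cannot spread fragment 'A' within itself via B.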
+ if fragment.name.value in self.visited_frags: + return + + fragment_name = fragment.name.value + visited_frags = self.visited_frags + visited_frags.add(fragment_name) + + spread_nodes = self.context.get_fragment_spreads( + fragment.selection_set) + if not spread_nodes: + return + + spread_path = self.spread_path + spread_path_index = self.spread_path_index_by_name + spread_path_index[fragment_name] = len(spread_path) + get_fragment = self.context.get_fragment + + for spread_node in spread_nodes: + spread_name = spread_node.name.value + cycle_index = spread_path_index.get(spread_name) + + spread_path.append(spread_node) + if cycle_index is None: + spread_fragment = get_fragment(spread_name) + if spread_fragment: + self.detect_cycle_recursive(spread_fragment) + else: + cycle_path = spread_path[cycle_index:] + fragment_names = [s.name.value for s in cycle_path[:-1]] + self.report_error(GraphQLError( + cycle_error_message(spread_name, fragment_names), + cycle_path)) + spread_path.pop() + + spread_path_index[fragment_name] = None diff --git a/graphql/validation/rules/no_undefined_variables.py b/graphql/validation/rules/no_undefined_variables.py new file mode 100644 index 00000000..61e037d2 --- /dev/null +++ b/graphql/validation/rules/no_undefined_variables.py @@ -0,0 +1,38 @@ +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['NoUndefinedVariablesRule', 'undefined_var_message'] + + +def undefined_var_message(var_name: str, op_name: str=None) -> str: + return (f"Variable '${var_name}' is not defined by operation '{op_name}'." + if op_name else f"Variable '${var_name}' is not defined.") + + +class NoUndefinedVariablesRule(ValidationRule): + """No undefined variables + + A GraphQL operation is only valid if all variables encountered, both + directly and via fragment spreads, are defined by that operation. + """ + + def __init__(self, context): + super().__init__(context) + self.defined_variable_names = set() + + def enter_operation_definition(self, *_args): + self.defined_variable_names.clear() + + def leave_operation_definition(self, operation, *_args): + usages = self.context.get_recursive_variable_usages(operation) + defined_variables = self.defined_variable_names + for usage in usages: + node = usage.node + var_name = node.name.value + if var_name not in defined_variables: + self.report_error(GraphQLError(undefined_var_message( + var_name, operation.name and operation.name.value), + [node, operation])) + + def enter_variable_definition(self, node, *_args): + self.defined_variable_names.add(node.variable.name.value) diff --git a/graphql/validation/rules/no_unused_fragments.py b/graphql/validation/rules/no_unused_fragments.py new file mode 100644 index 00000000..16cab2cc --- /dev/null +++ b/graphql/validation/rules/no_unused_fragments.py @@ -0,0 +1,43 @@ +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['NoUnusedFragmentsRule', 'unused_fragment_message'] + + +def unused_fragment_message(frag_name: str) -> str: + return f"Fragment '{frag_name}' is never used." + + +class NoUnusedFragmentsRule(ValidationRule): + """No unused fragments + + A GraphQL document is only valid if all fragment definitions are + spread within operations, or spread within other fragments spread + within operations. 
+ """ + + def __init__(self, context): + super().__init__(context) + self.operation_defs = [] + self.fragment_defs = [] + + def enter_operation_definition(self, node, *_args): + self.operation_defs.append(node) + return False + + def enter_fragment_definition(self, node, *_args): + self.fragment_defs.append(node) + return False + + def leave_document(self, *_args): + fragment_names_used = set() + get_fragments = self.context.get_recursively_referenced_fragments + for operation in self.operation_defs: + for fragment in get_fragments(operation): + fragment_names_used.add(fragment.name.value) + + for fragment_def in self.fragment_defs: + frag_name = fragment_def.name.value + if frag_name not in fragment_names_used: + self.report_error(GraphQLError( + unused_fragment_message(frag_name), [fragment_def])) diff --git a/graphql/validation/rules/no_unused_variables.py b/graphql/validation/rules/no_unused_variables.py new file mode 100644 index 00000000..d6992d1e --- /dev/null +++ b/graphql/validation/rules/no_unused_variables.py @@ -0,0 +1,41 @@ +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['NoUnusedVariablesRule', 'unused_variable_message'] + + +def unused_variable_message(var_name: str, op_name: str=None) -> str: + return (f"Variable '${var_name}' is never used in operation '{op_name}'." + if op_name else f"Variable '${var_name}' is never used.") + + +class NoUnusedVariablesRule(ValidationRule): + """No unused variables + + A GraphQL operation is only valid if all variables defined by an operation + are used, either directly or within a spread fragment. + """ + + def __init__(self, context): + super().__init__(context) + self.variable_defs = [] + + def enter_operation_definition(self, *_args): + self.variable_defs.clear() + + def leave_operation_definition(self, operation, *_args): + variable_name_used = set() + usages = self.context.get_recursive_variable_usages(operation) + op_name = operation.name.value if operation.name else None + + for usage in usages: + variable_name_used.add(usage.node.name.value) + + for variable_def in self.variable_defs: + variable_name = variable_def.variable.name.value + if variable_name not in variable_name_used: + self.report_error(GraphQLError(unused_variable_message( + variable_name, op_name), [variable_def])) + + def enter_variable_definition(self, definition, *_args): + self.variable_defs.append(definition) diff --git a/graphql/validation/rules/overlapping_fields_can_be_merged.py b/graphql/validation/rules/overlapping_fields_can_be_merged.py new file mode 100644 index 00000000..5b288a52 --- /dev/null +++ b/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -0,0 +1,750 @@ +from itertools import chain +from typing import Dict, List, Optional, Sequence, Set, Tuple, Union, cast + +from ...error import GraphQLError +from ...language import ( + ArgumentNode, FieldNode, FragmentDefinitionNode, FragmentSpreadNode, + InlineFragmentNode, SelectionSetNode, print_ast) +from ...type import ( + GraphQLCompositeType, GraphQLField, GraphQLList, GraphQLNamedType, + GraphQLNonNull, GraphQLOutputType, + get_named_type, is_interface_type, is_leaf_type, + is_list_type, is_non_null_type, is_object_type) +from ...utilities import type_from_ast +from . 
import ValidationContext, ValidationRule
+
+MYPY = False
+
+__all__ = [
+    'OverlappingFieldsCanBeMergedRule',
+    'fields_conflict_message', 'reason_message']
+
+
+def fields_conflict_message(
+        response_name: str, reason: 'ConflictReasonMessage') -> str:
+    return (
+        f"Fields '{response_name}' conflict because {reason_message(reason)}."
+        ' Use different aliases on the fields to fetch both if this was'
+        ' intentional.')
+
+
+def reason_message(reason: 'ConflictReasonMessage') -> str:
+    if isinstance(reason, list):
+        return ' and '.join(
+            f"subfields '{response_name}' conflict"
+            f' because {reason_message(subreason)}'
+            for response_name, subreason in reason)
+    return reason
+
+
+class OverlappingFieldsCanBeMergedRule(ValidationRule):
+    """Overlapping fields can be merged
+
+    A selection set is only valid if all fields (including spreading any
+    fragments) either correspond to distinct response names or can be merged
+    without ambiguity.
+    """
+
+    def __init__(self, context):
+        super().__init__(context)
+        # A memoization for when two fragments are compared "between" each
+        # other for conflicts.
+        # Two fragments may be compared many times, so memoizing this can
+        # dramatically improve the performance of this validator.
+        self.compared_fragment_pairs = PairSet()
+
+        # A cache for the "field map" and list of fragment names found in any
+        # given selection set.
+        # Selection sets may be asked for this information multiple times,
+        # so this improves the performance of this validator.
+        self.cached_fields_and_fragment_names = {}
+
+    def enter_selection_set(self, selection_set, *_args):
+        conflicts = find_conflicts_within_selection_set(
+            self.context,
+            self.cached_fields_and_fragment_names,
+            self.compared_fragment_pairs,
+            self.context.get_parent_type(),
+            selection_set)
+        for (reason_name, reason), fields1, fields2 in conflicts:
+            self.report_error(GraphQLError(
+                fields_conflict_message(reason_name, reason),
+                fields1 + fields2))
+
+
+Conflict = Tuple['ConflictReason', List[FieldNode], List[FieldNode]]
+# Field name and reason.
+ConflictReason = Tuple[str, 'ConflictReasonMessage']
+# Reason is a string, or a nested list of conflicts.
+if MYPY:  # recursive types not fully supported yet (/python/mypy/issues/731)
+    ConflictReasonMessage = Union[str, List]
+else:
+    ConflictReasonMessage = Union[str, List[ConflictReason]]
+# Tuple defining a field node in a context.
+NodeAndDef = Tuple[GraphQLCompositeType, FieldNode, Optional[GraphQLField]]
+# Dictionary of lists of those.
+NodeAndDefCollection = Dict[str, List[NodeAndDef]]
+
+
+# Algorithm:
+#
+# Conflicts occur when two fields exist in a query which will produce the same
+# response name, but represent differing values, thus creating a conflict.
+# The algorithm below finds all conflicts via making a series of comparisons
+# between fields. In order to compare as few fields as possible, this makes
+# a series of comparisons "within" sets of fields and "between" sets of fields.
+#
+# Given any selection set, a collection produces both a set of fields by
+# also including all inline fragments, as well as a list of fragments
+# referenced by fragment spreads.
+#
+# A) Each selection set represented in the document first compares "within" its
+# collected set of fields, finding any conflicts between every pair of
+# overlapping fields.
+# Note: This is the *only time* that the fields "within" a set are compared
+# to each other. After this only fields "between" sets are compared.
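# For example, the selection set "{ name: nickname name }" (on a type that
# defines both fields) collects two fields under the response name "name";
# step (A) compares them and, because they target different fields,
# fields_conflict_message() above yields:
#     Fields 'name' conflict because nickname and name are different fields.
#     Use different aliases on the fields to fetch both if this was intentional.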
+# +# B) Also, if any fragment is referenced in a selection set, then a +# comparison is made "between" the original set of fields and the +# referenced fragment. +# +# C) Also, if multiple fragments are referenced, then comparisons +# are made "between" each referenced fragment. +# +# D) When comparing "between" a set of fields and a referenced fragment, first +# a comparison is made between each field in the original set of fields and +# each field in the the referenced set of fields. +# +# E) Also, if any fragment is referenced in the referenced selection set, +# then a comparison is made "between" the original set of fields and the +# referenced fragment (recursively referring to step D). +# +# F) When comparing "between" two fragments, first a comparison is made between +# each field in the first referenced set of fields and each field in the the +# second referenced set of fields. +# +# G) Also, any fragments referenced by the first must be compared to the +# second, and any fragments referenced by the second must be compared to the +# first (recursively referring to step F). +# +# H) When comparing two fields, if both have selection sets, then a comparison +# is made "between" both selection sets, first comparing the set of fields in +# the first selection set with the set of fields in the second. +# +# I) Also, if any fragment is referenced in either selection set, then a +# comparison is made "between" the other set of fields and the +# referenced fragment. +# +# J) Also, if two fragments are referenced in both selection sets, then a +# comparison is made "between" the two fragments. + + +def find_conflicts_within_selection_set( + context: ValidationContext, + cached_fields_and_fragment_names: Dict, + compared_fragment_pairs: 'PairSet', + parent_type: Optional[GraphQLNamedType], + selection_set: SelectionSetNode) -> List[Conflict]: + """Find conflicts within selection set. + + Find all conflicts found "within" a selection set, including those found + via spreading in fragments. + + Called when visiting each SelectionSet in the GraphQL Document. + """ + conflicts: List[Conflict] = [] + + field_map, fragment_names = get_fields_and_fragment_names( + context, + cached_fields_and_fragment_names, + parent_type, + selection_set) + + # (A) Find all conflicts "within" the fields of this selection set. + # Note: this is the *only place* `collect_conflicts_within` is called. + collect_conflicts_within( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragment_pairs, + field_map) + + if fragment_names: + compared_fragments: Set[str] = set() + # (B) Then collect conflicts between these fields and those represented + # by each spread fragment name found. + for i, fragment_name in enumerate(fragment_names): + collect_conflicts_between_fields_and_fragment( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragments, + compared_fragment_pairs, + False, + field_map, + fragment_name) + # (C) Then compare this fragment with all other fragments found in + # this selection set to collect conflicts within fragments spread + # together. This compares each item in the list of fragment names + # to every other item in that same list (except for itself). 
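            # e.g. fragment names [A, B, C] yield the comparisons
            # (A, B), (A, C) and (B, C).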
+ for other_fragment_name in fragment_names[i + 1:]: + collect_conflicts_between_fragments( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragment_pairs, + False, + fragment_name, + other_fragment_name) + + return conflicts + + +def collect_conflicts_between_fields_and_fragment( + context: ValidationContext, + conflicts: List[Conflict], + cached_fields_and_fragment_names: Dict, + compared_fragments: Set[str], + compared_fragment_pairs: 'PairSet', + are_mutually_exclusive: bool, + field_map: NodeAndDefCollection, + fragment_name: str) -> None: + """Collect conflicts between fields and fragment. + + Collect all conflicts found between a set of fields and a fragment + reference including via spreading in any nested fragments. + """ + # Memoize so a fragment is not compared for conflicts more than once. + if fragment_name in compared_fragments: + return + compared_fragments.add(fragment_name) + + fragment = context.get_fragment(fragment_name) + if not fragment: + return None + + field_map2, fragment_names2 = get_referenced_fields_and_fragment_names( + context, + cached_fields_and_fragment_names, + fragment) + + # Do not compare a fragment's fieldMap to itself. + if field_map is field_map2: + return + + # (D) First collect any conflicts between the provided collection of fields + # and the collection of fields represented by the given fragment. + collect_conflicts_between( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragment_pairs, + are_mutually_exclusive, + field_map, + field_map2) + + # (E) Then collect any conflicts between the provided collection of fields + # and any fragment names found in the given fragment. + for fragment_name2 in fragment_names2: + collect_conflicts_between_fields_and_fragment( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragments, + compared_fragment_pairs, + are_mutually_exclusive, + field_map, + fragment_name2) + + +def collect_conflicts_between_fragments( + context: ValidationContext, + conflicts: List[Conflict], + cached_fields_and_fragment_names: Dict, + compared_fragment_pairs: 'PairSet', + are_mutually_exclusive: bool, + fragment_name1: str, + fragment_name2: str) -> None: + """Collect conflicts between fragments. + + Collect all conflicts found between two fragments, including via spreading + in any nested fragments + """ + # No need to compare a fragment to itself. + if fragment_name1 == fragment_name2: + return + + # Memoize so two fragments are not compared for conflicts more than once. 
+ if compared_fragment_pairs.has( + fragment_name1, fragment_name2, are_mutually_exclusive): + return + compared_fragment_pairs.add( + fragment_name1, fragment_name2, are_mutually_exclusive) + + fragment1 = context.get_fragment(fragment_name1) + fragment2 = context.get_fragment(fragment_name2) + if not fragment1 or not fragment2: + return None + + field_map1, fragment_names1 = get_referenced_fields_and_fragment_names( + context, + cached_fields_and_fragment_names, + fragment1) + + field_map2, fragment_names2 = get_referenced_fields_and_fragment_names( + context, + cached_fields_and_fragment_names, + fragment2) + + # (F) First, collect all conflicts between these two collections of fields + # (not including any nested fragments) + collect_conflicts_between( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragment_pairs, + are_mutually_exclusive, + field_map1, + field_map2) + + # (G) Then collect conflicts between the first fragment and any nested + # fragments spread in the second fragment. + for nested_fragment_name2 in fragment_names2: + collect_conflicts_between_fragments( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragment_pairs, + are_mutually_exclusive, + fragment_name1, + nested_fragment_name2) + + # (G) Then collect conflicts between the second fragment and any nested + # fragments spread in the first fragment. + for nested_fragment_name1 in fragment_names1: + collect_conflicts_between_fragments( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragment_pairs, + are_mutually_exclusive, + nested_fragment_name1, + fragment_name2) + + +def find_conflicts_between_sub_selection_sets( + context: ValidationContext, + cached_fields_and_fragment_names: Dict, + compared_fragment_pairs: 'PairSet', + are_mutually_exclusive: bool, + parent_type1: Optional[GraphQLNamedType], + selection_set1: SelectionSetNode, + parent_type2: Optional[GraphQLNamedType], + selection_set2: SelectionSetNode) -> List[Conflict]: + """Find conflicts between sub selection sets. + + Find all conflicts found between two selection sets, including those found + via spreading in fragments. Called when determining if conflicts exist + between the sub-fields of two overlapping fields. + """ + conflicts: List[Conflict] = [] + + field_map1, fragment_names1 = get_fields_and_fragment_names( + context, + cached_fields_and_fragment_names, + parent_type1, + selection_set1) + field_map2, fragment_names2 = get_fields_and_fragment_names( + context, + cached_fields_and_fragment_names, + parent_type2, + selection_set2) + + # (H) First, collect all conflicts between these two collections of field. + collect_conflicts_between( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragment_pairs, + are_mutually_exclusive, + field_map1, + field_map2) + + # (I) Then collect conflicts between the first collection of fields and + # those referenced by each fragment name associated with the second. + if fragment_names2: + compared_fragments: Set[str] = set() + for fragment_name2 in fragment_names2: + collect_conflicts_between_fields_and_fragment( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragments, + compared_fragment_pairs, + are_mutually_exclusive, + field_map1, + fragment_name2) + + # (I) Then collect conflicts between the second collection of fields and + # those referenced by each fragment name associated with the first. 
+ if fragment_names1: + compared_fragments = set() + for fragment_name1 in fragment_names1: + collect_conflicts_between_fields_and_fragment( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragments, + compared_fragment_pairs, + are_mutually_exclusive, + field_map2, + fragment_name1) + + # (J) Also collect conflicts between any fragment names by the first and + # fragment names by the second. This compares each item in the first set of + # names to each item in the second set of names. + for fragment_name1 in fragment_names1: + for fragment_name2 in fragment_names2: + collect_conflicts_between_fragments( + context, + conflicts, + cached_fields_and_fragment_names, + compared_fragment_pairs, + are_mutually_exclusive, + fragment_name1, + fragment_name2) + + return conflicts + + +def collect_conflicts_within( + context: ValidationContext, + conflicts: List[Conflict], + cached_fields_and_fragment_names: Dict, + compared_fragment_pairs: 'PairSet', + field_map: NodeAndDefCollection) -> None: + """Collect all Conflicts "within" one collection of fields.""" + # A field map is a keyed collection, where each key represents a response + # name and the value at that key is a list of all fields which provide that + # response name. For every response name, if there are multiple fields, + # they must be compared to find a potential conflict. + for response_name, fields in field_map.items(): + # This compares every field in the list to every other field in this + # list (except to itself). If the list only has one item, nothing needs + # to be compared. + if len(fields) > 1: + for i, field in enumerate(fields): + for other_field in fields[i + 1:]: + conflict = find_conflict( + context, + cached_fields_and_fragment_names, + compared_fragment_pairs, + # within one collection is never mutually exclusive + False, + response_name, + field, + other_field) + if conflict: + conflicts.append(conflict) + + +def collect_conflicts_between( + context: ValidationContext, + conflicts: List[Conflict], + cached_fields_and_fragment_names: Dict, + compared_fragment_pairs: 'PairSet', + parent_fields_are_mutually_exclusive: bool, + field_map1: NodeAndDefCollection, + field_map2: NodeAndDefCollection) -> None: + """Collect all Conflicts between two collections of fields. + + This is similar to, but different from the `collectConflictsWithin` + function above. This check assumes that `collectConflictsWithin` has + already been called on each provided collection of fields. This is true + because this validator traverses each individual selection set. + """ + # A field map is a keyed collection, where each key represents a response + # name and the value at that key is a list of all fields which provide that + # response name. For any response name which appears in both provided field + # maps, each field from the first field map must be compared to every field + # in the second field map to find potential conflicts. 
+ for response_name, fields1 in field_map1.items(): + fields2 = field_map2.get(response_name) + if fields2: + for field1 in fields1: + for field2 in fields2: + conflict = find_conflict( + context, + cached_fields_and_fragment_names, + compared_fragment_pairs, + parent_fields_are_mutually_exclusive, + response_name, + field1, + field2) + if conflict: + conflicts.append(conflict) + + +def find_conflict( + context: ValidationContext, + cached_fields_and_fragment_names: Dict, + compared_fragment_pairs: 'PairSet', + parent_fields_are_mutually_exclusive: bool, + response_name: str, + field1: NodeAndDef, + field2: NodeAndDef) -> Optional[Conflict]: + """Find conflict. + + Determines if there is a conflict between two particular fields, including + comparing their sub-fields. + """ + parent_type1, node1, def1 = field1 + parent_type2, node2, def2 = field2 + + # If it is known that two fields could not possibly apply at the same + # time, due to the parent types, then it is safe to permit them to diverge + # in aliased field or arguments used as they will not present any ambiguity + # by differing. + # It is known that two parent types could never overlap if they are + # different Object types. Interface or Union types might overlap - if not + # in the current state of the schema, then perhaps in some future version, + # thus may not safely diverge. + are_mutually_exclusive = ( + parent_fields_are_mutually_exclusive or ( + parent_type1 != parent_type2 and + is_object_type(parent_type1) and + is_object_type(parent_type2))) + + # The return type for each field. + type1 = cast(Optional[GraphQLOutputType], def1 and def1.type) + type2 = cast(Optional[GraphQLOutputType], def2 and def2.type) + + if not are_mutually_exclusive: + # Two aliases must refer to the same field. + name1 = node1.name.value + name2 = node2.name.value + if name1 != name2: + return ( + (response_name, f'{name1} and {name2} are different fields'), + [node1], + [node2]) + + # Two field calls must have the same arguments. + if not same_arguments(node1.arguments or [], node2.arguments or []): + return ( + (response_name, 'they have differing arguments'), + [node1], + [node2]) + + if type1 and type2 and do_types_conflict(type1, type2): + return ( + (response_name, 'they return conflicting types' + f' {type1} and {type2}'), + [node1], + [node2]) + + # Collect and compare sub-fields. Use the same "visited fragment names" + # list for both collections so fields in a fragment reference are never + # compared to themselves. 
+ selection_set1 = node1.selection_set + selection_set2 = node2.selection_set + if selection_set1 and selection_set2: + conflicts = find_conflicts_between_sub_selection_sets( + context, + cached_fields_and_fragment_names, + compared_fragment_pairs, + are_mutually_exclusive, + get_named_type(type1), + selection_set1, + get_named_type(type2), + selection_set2) + return subfield_conflicts(conflicts, response_name, node1, node2) + + return None # no conflict + + +def same_arguments( + arguments1: Sequence[ArgumentNode], + arguments2: Sequence[ArgumentNode]) -> bool: + if len(arguments1) != len(arguments2): + return False + for argument1 in arguments1: + for argument2 in arguments2: + if argument2.name.value == argument1.name.value: + if not same_value(argument1.value, argument2.value): + return False + break + else: + return False + return True + + +def same_value(value1, value2): + return (not value1 and not value2) or ( + print_ast(value1) == print_ast(value2)) + + +def do_types_conflict( + type1: GraphQLOutputType, + type2: GraphQLOutputType) -> bool: + """Check whether two types conflict + + Two types conflict if both types could not apply to a value simultaneously. + Composite types are ignored as their individual field types will be + compared later recursively. However List and Non-Null types must match. + """ + if is_list_type(type1): + return do_types_conflict( + cast(GraphQLList, type1).of_type, + cast(GraphQLList, type2).of_type + ) if is_list_type(type2) else True + if is_list_type(type2): + return True + if is_non_null_type(type1): + return do_types_conflict( + cast(GraphQLNonNull, type1).of_type, + cast(GraphQLNonNull, type2).of_type + ) if is_non_null_type(type2) else True + if is_non_null_type(type2): + return True + if is_leaf_type(type1) or is_leaf_type(type2): + return type1 is not type2 + return False + + +def get_fields_and_fragment_names( + context: ValidationContext, + cached_fields_and_fragment_names: Dict, + parent_type: Optional[GraphQLNamedType], + selection_set: SelectionSetNode + ) -> Tuple[NodeAndDefCollection, List[str]]: + """Get fields and referenced fragment names + + Given a selection set, return the collection of fields (a mapping of + response name to field nodes and definitions) as well as a list of fragment + names referenced via fragment spreads. + """ + cached = cached_fields_and_fragment_names.get(selection_set) + if not cached: + node_and_defs: NodeAndDefCollection = {} + fragment_names: Dict[str, bool] = {} + collect_fields_and_fragment_names( + context, + parent_type, + selection_set, + node_and_defs, + fragment_names) + cached = (node_and_defs, list(fragment_names)) + cached_fields_and_fragment_names[selection_set] = cached + return cached + + +def get_referenced_fields_and_fragment_names( + context: ValidationContext, + cached_fields_and_fragment_names: Dict, + fragment: FragmentDefinitionNode + ) -> Tuple[NodeAndDefCollection, List[str]]: + """Get referenced fields and nested fragment names + + Given a reference to a fragment, return the represented collection of + fields as well as a list of nested fragment names referenced via fragment + spreads. + """ + # Short-circuit building a type from the node if possible. 
+ cached = cached_fields_and_fragment_names.get(fragment.selection_set) + if cached: + return cached + + fragment_type = type_from_ast(context.schema, fragment.type_condition) + return get_fields_and_fragment_names( + context, + cached_fields_and_fragment_names, + fragment_type, + fragment.selection_set) + + +def collect_fields_and_fragment_names( + context: ValidationContext, + parent_type: Optional[GraphQLNamedType], + selection_set: SelectionSetNode, + node_and_defs: NodeAndDefCollection, + fragment_names: Dict[str, bool]) -> None: + for selection in selection_set.selections: + if isinstance(selection, FieldNode): + field_name = selection.name.value + field_def = (parent_type.fields.get(field_name) # type: ignore + if is_object_type(parent_type) or + is_interface_type(parent_type) else None) + response_name = (selection.alias.value + if selection.alias else field_name) + if not node_and_defs.get(response_name): + node_and_defs[response_name] = [] + node_and_defs[response_name].append( + cast(NodeAndDef, (parent_type, selection, field_def))) + elif isinstance(selection, FragmentSpreadNode): + fragment_names[selection.name.value] = True + elif isinstance(selection, InlineFragmentNode): + type_condition = selection.type_condition + inline_fragment_type = ( + type_from_ast(context.schema, type_condition) + if type_condition else parent_type) + collect_fields_and_fragment_names( + context, + inline_fragment_type, + selection.selection_set, + node_and_defs, + fragment_names) + + +def subfield_conflicts( + conflicts: List[Conflict], + response_name: str, + node1: FieldNode, + node2: FieldNode) -> Optional[Conflict]: + """Check whether there are conflicts between sub-fields. + + Given a series of Conflicts which occurred between two sub-fields, + generate a single Conflict. + """ + if conflicts: + return ( + (response_name, [conflict[0] for conflict in conflicts]), + list(chain([node1], *[conflict[1] for conflict in conflicts])), + list(chain([node2], *[conflict[2] for conflict in conflicts]))) + return None # no conflict + + +class PairSet: + """Pair set + + A way to keep track of pairs of things when the ordering of the pair does + not matter. We do this by maintaining a sort of double adjacency sets. + """ + + __slots__ = '_data', + + def __init__(self): + self._data: Dict[str, Dict[str, bool]] = {} + + def has(self, a: str, b: str, are_mutually_exclusive: bool): + first = self._data.get(a) + result = first and first.get(b) + if result is None: + return False + # are_mutually_exclusive being false is a superset of being true, + # hence if we want to know if this PairSet "has" these two with no + # exclusivity, we have to ensure it was added as such. + if not are_mutually_exclusive: + return not result + return True + + def add(self, a: str, b: str, are_mutually_exclusive: bool): + self._pair_set_add(a, b, are_mutually_exclusive) + self._pair_set_add(b, a, are_mutually_exclusive) + return self + + def _pair_set_add(self, a: str, b: str, are_mutually_exclusive: bool): + a_map = self._data.get(a) + if not a_map: + self._data[a] = a_map = {} + a_map[b] = are_mutually_exclusive diff --git a/graphql/validation/rules/possible_fragment_spreads.py b/graphql/validation/rules/possible_fragment_spreads.py new file mode 100644 index 00000000..37e3605f --- /dev/null +++ b/graphql/validation/rules/possible_fragment_spreads.py @@ -0,0 +1,60 @@ +from ...error import GraphQLError +from ...type import is_composite_type +from ...utilities import do_types_overlap, type_from_ast +from . 
import ValidationRule + +__all__ = [ + 'PossibleFragmentSpreadsRule', + 'type_incompatible_spread_message', + 'type_incompatible_anon_spread_message'] + + +def type_incompatible_spread_message( + frag_name: str, parent_type: str, frag_type: str) -> str: + return (f"Fragment '{frag_name}' cannot be spread here as objects" + f" of type '{parent_type}' can never be of type '{frag_type}'.") + + +def type_incompatible_anon_spread_message( + parent_type: str, frag_type: str) -> str: + return (f'Fragment cannot be spread here as objects' + f" of type '{parent_type}' can never be of type '{frag_type}'.") + + +class PossibleFragmentSpreadsRule(ValidationRule): + """Possible fragment spread + + A fragment spread is only valid if the type condition could ever possibly + be true: if there is a non-empty intersection of the possible parent types, + and possible types which pass the type condition. + """ + + def enter_inline_fragment(self, node, *_args): + context = self.context + frag_type = context.get_type() + parent_type = context.get_parent_type() + if (is_composite_type(frag_type) and is_composite_type(parent_type) and + not do_types_overlap(context.schema, frag_type, parent_type)): + context.report_error(GraphQLError( + type_incompatible_anon_spread_message( + str(parent_type), str(frag_type)), + [node])) + + def enter_fragment_spread(self, node, *_args): + context = self.context + frag_name = node.name.value + frag_type = self.get_fragment_type(frag_name) + parent_type = context.get_parent_type() + if frag_type and parent_type and not do_types_overlap( + context.schema, frag_type, parent_type): + context.report_error(GraphQLError( + type_incompatible_spread_message( + frag_name, str(parent_type), str(frag_type)), [node])) + + def get_fragment_type(self, name): + context = self.context + frag = context.get_fragment(name) + if frag: + type_ = type_from_ast(context.schema, frag.type_condition) + if is_composite_type(type_): + return type_ diff --git a/graphql/validation/rules/provided_required_arguments.py b/graphql/validation/rules/provided_required_arguments.py new file mode 100644 index 00000000..988c7e68 --- /dev/null +++ b/graphql/validation/rules/provided_required_arguments.py @@ -0,0 +1,57 @@ +from ...error import GraphQLError, INVALID +from ...type import is_non_null_type +from . import ValidationRule + +__all__ = [ + 'ProvidedRequiredArgumentsRule', + 'missing_field_arg_message', 'missing_directive_arg_message'] + + +def missing_field_arg_message( + field_name: str, arg_name: str, type_: str) -> str: + return (f"Field '{field_name}' argument '{arg_name}'" + f" of type '{type_}' is required but not provided.") + + +def missing_directive_arg_message( + directive_name: str, arg_name: str, type_: str) -> str: + return (f"Directive '@{directive_name}' argument '{arg_name}'" + f" of type '{type_}' is required but not provided.") + + +class ProvidedRequiredArgumentsRule(ValidationRule): + """Provided required arguments + + A field or directive is only valid if all required (non-null without a + default value) field arguments have been provided. + """ + + def leave_field(self, node, *_args): + # Validate on leave to allow for deeper errors to appear first. 
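        # For a hypothetical field "multiply(by: Int!): Int" queried as
        # "{ multiply }", missing_field_arg_message() above produces:
        #     Field 'multiply' argument 'by' of type 'Int!' is required but not provided.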
+ field_def = self.context.get_field_def() + if not field_def: + return self.SKIP + arg_nodes = node.arguments or [] + + arg_node_map = {arg.name.value: arg for arg in arg_nodes} + for arg_name, arg_def in field_def.args.items(): + arg_node = arg_node_map.get(arg_name) + if not arg_node and is_non_null_type( + arg_def.type) and arg_def.default_value is INVALID: + self.report_error(GraphQLError(missing_field_arg_message( + node.name.value, arg_name, str(arg_def.type)), [node])) + + def leave_directive(self, node, *_args): + # Validate on leave to allow for deeper errors to appear first. + directive_def = self.context.get_directive() + if not directive_def: + return False + arg_nodes = node.arguments or [] + + arg_node_map = {arg.name.value: arg for arg in arg_nodes} + for arg_name, arg_def in directive_def.args.items(): + arg_node = arg_node_map.get(arg_name) + if not arg_node and is_non_null_type( + arg_def.type) and arg_def.default_value is INVALID: + self.report_error(GraphQLError(missing_directive_arg_message( + node.name.value, arg_name, str(arg_def.type)), [node])) diff --git a/graphql/validation/rules/scalar_leafs.py b/graphql/validation/rules/scalar_leafs.py new file mode 100644 index 00000000..803c9dac --- /dev/null +++ b/graphql/validation/rules/scalar_leafs.py @@ -0,0 +1,43 @@ +from ...error import GraphQLError +from ...type import get_named_type, is_leaf_type +from . import ValidationRule + +__all__ = [ + 'ScalarLeafsRule', + 'no_subselection_allowed_message', 'required_subselection_message'] + + +def no_subselection_allowed_message( + field_name: str, type_: str) -> str: + return (f"Field '{field_name}' must not have a sub selection" + f" since type '{type_}' has no subfields.") + + +def required_subselection_message( + field_name: str, type_: str) -> str: + return (f"Field '{field_name}' of type '{type_}' must have a" + ' sub selection of subfields.' + f" Did you mean '{field_name} {{ ... }}'?") + + +class ScalarLeafsRule(ValidationRule): + """Scalar leafs + + A GraphQL document is valid only if all leaf fields (fields without + sub selections) are of scalar or enum types. + """ + + def enter_field(self, node, *_args): + type_ = self.context.get_type() + if type_: + selection_set = node.selection_set + if is_leaf_type(get_named_type(type_)): + if selection_set: + self.report_error(GraphQLError( + no_subselection_allowed_message( + node.name.value, str(type_)), + [node.selection_set])) + elif not selection_set: + self.report_error(GraphQLError( + required_subselection_message(node.name.value, str(type_)), + [node])) diff --git a/graphql/validation/rules/single_field_subscriptions.py b/graphql/validation/rules/single_field_subscriptions.py new file mode 100644 index 00000000..b1b47bce --- /dev/null +++ b/graphql/validation/rules/single_field_subscriptions.py @@ -0,0 +1,27 @@ +from typing import Optional + +from ...error import GraphQLError +from ...language import OperationDefinitionNode, OperationType +from . import ValidationRule + +__all__ = ['SingleFieldSubscriptionsRule', 'single_field_only_message'] + + +def single_field_only_message(name: Optional[str]) -> str: + return ((f"Subscription '{name}'" if name else 'Anonymous Subscription') + + ' must select only one top level field.') + + +class SingleFieldSubscriptionsRule(ValidationRule): + """Subscriptions must only include one field. 
+ + A GraphQL subscription is valid only if it contains a single root field. + """ + + def enter_operation_definition( + self, node: OperationDefinitionNode, *_args): + if node.operation == OperationType.SUBSCRIPTION: + if len(node.selection_set.selections) != 1: + self.report_error(GraphQLError(single_field_only_message( + node.name.value if node.name else None), + node.selection_set.selections[1:])) diff --git a/graphql/validation/rules/unique_argument_names.py b/graphql/validation/rules/unique_argument_names.py new file mode 100644 index 00000000..61487d72 --- /dev/null +++ b/graphql/validation/rules/unique_argument_names.py @@ -0,0 +1,37 @@ +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['UniqueArgumentNamesRule', 'duplicate_arg_message'] + + +def duplicate_arg_message(arg_name: str) -> str: + return f"There can only be one argument named '{arg_name}'." + + +class UniqueArgumentNamesRule(ValidationRule): + """Unique argument names + + A GraphQL field or directive is only valid if all supplied arguments are + uniquely named. + """ + + def __init__(self, context): + super().__init__(context) + self.known_arg_names = {} + + def enter_field(self, *_args): + self.known_arg_names.clear() + + def enter_directive(self, *_args): + self.known_arg_names.clear() + + def enter_argument(self, node, *_args): + known_arg_names = self.known_arg_names + arg_name = node.name.value + if arg_name in known_arg_names: + self.report_error(GraphQLError( + duplicate_arg_message(arg_name), + [known_arg_names[arg_name], node.name])) + else: + known_arg_names[arg_name] = node.name + return self.SKIP diff --git a/graphql/validation/rules/unique_directives_per_location.py b/graphql/validation/rules/unique_directives_per_location.py new file mode 100644 index 00000000..ee14dbbe --- /dev/null +++ b/graphql/validation/rules/unique_directives_per_location.py @@ -0,0 +1,36 @@ +from typing import List + +from ...language import DirectiveNode +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['UniqueDirectivesPerLocationRule', 'duplicate_directive_message'] + + +def duplicate_directive_message(directive_name: str) -> str: + return (f"The directive '{directive_name}'" + ' can only be used once at this location.') + + +class UniqueDirectivesPerLocationRule(ValidationRule): + """Unique directive names per location + + A GraphQL document is only valid if all directives at a given location + are uniquely named. + """ + + # Many different AST nodes may contain directives. Rather than listing + # them all, just listen for entering any node, and check to see if it + # defines any directives. + def enter(self, node, *_args): + directives: List[DirectiveNode] = getattr(node, 'directives', None) + if directives: + known_directives = {} + for directive in directives: + directive_name = directive.name.value + if directive_name in known_directives: + self.report_error(GraphQLError( + duplicate_directive_message(directive_name), + [known_directives[directive_name], directive])) + else: + known_directives[directive_name] = directive diff --git a/graphql/validation/rules/unique_fragment_names.py b/graphql/validation/rules/unique_fragment_names.py new file mode 100644 index 00000000..dd777fe1 --- /dev/null +++ b/graphql/validation/rules/unique_fragment_names.py @@ -0,0 +1,34 @@ +from ...error import GraphQLError +from . 
import ValidationRule + +__all__ = ['UniqueFragmentNamesRule', 'duplicate_fragment_name_message'] + + +def duplicate_fragment_name_message(frag_name: str) -> str: + return f"There can only be one fragment named '{frag_name}'." + + +class UniqueFragmentNamesRule(ValidationRule): + """Unique fragment names + + A GraphQL document is only valid if all defined fragments have unique + names. + """ + + def __init__(self, context): + super().__init__(context) + self.known_fragment_names = {} + + def enter_operation_definition(self, *_args): + return self.SKIP + + def enter_fragment_definition(self, node, *_args): + known_fragment_names = self.known_fragment_names + fragment_name = node.name.value + if fragment_name in known_fragment_names: + self.report_error(GraphQLError( + duplicate_fragment_name_message(fragment_name), + [known_fragment_names[fragment_name], node.name])) + else: + known_fragment_names[fragment_name] = node.name + return self.SKIP diff --git a/graphql/validation/rules/unique_input_field_names.py b/graphql/validation/rules/unique_input_field_names.py new file mode 100644 index 00000000..c66bbeba --- /dev/null +++ b/graphql/validation/rules/unique_input_field_names.py @@ -0,0 +1,38 @@ +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['UniqueInputFieldNamesRule', 'duplicate_input_field_message'] + + +def duplicate_input_field_message(field_name: str) -> str: + return f"There can only be one input field named '{field_name}'." + + +class UniqueInputFieldNamesRule(ValidationRule): + """Unique input field names + + A GraphQL input object value is only valid if all supplied fields are + uniquely named. + """ + + def __init__(self, context): + super().__init__(context) + self.known_names_stack = [] + self.known_names = {} + + def enter_object_value(self, *_args): + self.known_names_stack.append(self.known_names) + self.known_names = {} + + def leave_object_value(self, *_args): + self.known_names = self.known_names_stack.pop() + + def enter_object_field(self, node, *_args): + known_names = self.known_names + field_name = node.name.value + if field_name in known_names: + self.report_error(GraphQLError(duplicate_input_field_message( + field_name), [known_names[field_name], node.name])) + else: + known_names[field_name] = node.name + return False diff --git a/graphql/validation/rules/unique_operation_names.py b/graphql/validation/rules/unique_operation_names.py new file mode 100644 index 00000000..70bc6152 --- /dev/null +++ b/graphql/validation/rules/unique_operation_names.py @@ -0,0 +1,36 @@ +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['UniqueOperationNamesRule', 'duplicate_operation_name_message'] + + +def duplicate_operation_name_message(operation_name: str) -> str: + return f"There can only be one operation named '{operation_name}'." + + +class UniqueOperationNamesRule(ValidationRule): + """Unique operation names + + A GraphQL document is only valid if all defined operations have unique + names. 
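+
+    For example, the following document is invalid because both operations
+    share the name 'getName' (the names and fields are only illustrative):
+
+        query getName { fieldA }
+        query getName { fieldB }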
+ """ + + def __init__(self, context): + super().__init__(context) + self.known_operation_names = {} + + def enter_operation_definition(self, node, *_args): + operation_name = node.name + if operation_name: + known_operation_names = self.known_operation_names + if operation_name.value in known_operation_names: + self.report_error(GraphQLError( + duplicate_operation_name_message(operation_name.value), + [known_operation_names[operation_name.value], + operation_name])) + else: + known_operation_names[operation_name.value] = operation_name + return self.SKIP + + def enter_fragment_definition(self, *_args): + return self.SKIP diff --git a/graphql/validation/rules/unique_variable_names.py b/graphql/validation/rules/unique_variable_names.py new file mode 100644 index 00000000..145c6c5c --- /dev/null +++ b/graphql/validation/rules/unique_variable_names.py @@ -0,0 +1,32 @@ +from ...error import GraphQLError +from . import ValidationRule + +__all__ = ['UniqueVariableNamesRule', 'duplicate_variable_message'] + + +def duplicate_variable_message(variable_name: str) -> str: + return f"There can be only one variable named '{variable_name}'." + + +class UniqueVariableNamesRule(ValidationRule): + """Unique variable names + + A GraphQL operation is only valid if all its variables are uniquely named. + """ + + def __init__(self, context): + super().__init__(context) + self.known_variable_names = {} + + def enter_operation_definition(self, *_args): + self.known_variable_names.clear() + + def enter_variable_definition(self, node, *_args): + known_variable_names = self.known_variable_names + variable_name = node.variable.name.value + if variable_name in known_variable_names: + self.report_error(GraphQLError( + duplicate_variable_message(variable_name), + [known_variable_names[variable_name], node.variable.name])) + else: + known_variable_names[variable_name] = node.variable.name diff --git a/graphql/validation/rules/values_of_correct_type.py b/graphql/validation/rules/values_of_correct_type.py new file mode 100644 index 00000000..7baf8610 --- /dev/null +++ b/graphql/validation/rules/values_of_correct_type.py @@ -0,0 +1,145 @@ +from typing import Optional, cast + +from ...error import GraphQLError, INVALID +from ...language import ValueNode, print_ast +from ...pyutils import is_invalid, or_list, suggestion_list +from ...type import ( + GraphQLEnumType, GraphQLScalarType, GraphQLType, + get_named_type, get_nullable_type, is_enum_type, is_input_object_type, + is_list_type, is_non_null_type, is_scalar_type) +from . import ValidationRule + +__all__ = [ + 'ValuesOfCorrectTypeRule', + 'bad_value_message', 'required_field_message', 'unknown_field_message'] + + +def bad_value_message( + type_name: str, value_name: str, message: str=None) -> str: + return f'Expected type {type_name}, found {value_name}' + ( + f'; {message}' if message else '.') + + +def required_field_message( + type_name: str, field_name: str, field_type_name: str) -> str: + return (f'Field {type_name}.{field_name} of required type' + f' {field_type_name} was not provided.') + + +def unknown_field_message( + type_name: str, field_name: str, message: str=None) -> str: + return f'Field {field_name} is not defined by type {type_name}' + ( + f'; {message}' if message else '.') + + +class ValuesOfCorrectTypeRule(ValidationRule): + """Value literals of correct type + + A GraphQL document is only valid if all value literals are of the type + expected at their position. 
+ """ + + def enter_null_value(self, node, *_args): + type_ = self.context.get_input_type() + if is_non_null_type(type_): + self.report_error(GraphQLError( + bad_value_message(type_, print_ast(node)), node)) + + def enter_list_value(self, node, *_args): + # Note: TypeInfo will traverse into a list's item type, so look to the + # parent input type to check if it is a list. + type_ = get_nullable_type(self.context.get_parent_input_type()) + if not is_list_type(type_): + self.is_valid_scalar(node) + return self.SKIP # Don't traverse further. + + def enter_object_value(self, node, *_args): + type_ = get_named_type(self.context.get_input_type()) + if not is_input_object_type(type_): + self.is_valid_scalar(node) + return self.SKIP # Don't traverse further. + # Ensure every required field exists. + input_fields = type_.fields + field_node_map = {field.name.value: field for field in node.fields} + for field_name, field_def in input_fields.items(): + field_type = field_def.type + field_node = field_node_map.get(field_name) + if not field_node and is_non_null_type( + field_type) and field_def.default_value is INVALID: + self.report_error(GraphQLError(required_field_message( + type_.name, field_name, field_type), node)) + + def enter_object_field(self, node, *_args): + parent_type = get_named_type(self.context.get_parent_input_type()) + field_type = self.context.get_input_type() + if not field_type and is_input_object_type(parent_type): + suggestions = suggestion_list( + node.name.value, list(parent_type.fields)) + did_you_mean = (f'Did you mean {or_list(suggestions)}?' + if suggestions else None) + self.report_error(GraphQLError(unknown_field_message( + parent_type.name, node.name.value, did_you_mean), node)) + + def enter_enum_value(self, node, *_args): + type_ = get_named_type(self.context.get_input_type()) + if not is_enum_type(type_): + self.is_valid_scalar(node) + elif node.value not in type_.values: + self.report_error(GraphQLError(bad_value_message( + type_.name, print_ast(node), + enum_type_suggestion(type_, node)), node)) + + def enter_int_value(self, node, *_args): + self.is_valid_scalar(node) + + def enter_float_value(self, node, *_args): + self.is_valid_scalar(node) + + def enter_string_value(self, node, *_args): + self.is_valid_scalar(node) + + def enter_boolean_value(self, node, *_args): + self.is_valid_scalar(node) + + def is_valid_scalar(self, node: ValueNode) -> None: + """Check whether this is a valid scalar. + + Any value literal may be a valid representation of a Scalar, depending + on that scalar type. + """ + # Report any error at the full type expected by the location. + location_type = self.context.get_input_type() + if not location_type: + return + + type_ = get_named_type(location_type) + + if not is_scalar_type(type_): + self.report_error(GraphQLError(bad_value_message( + location_type, print_ast(node), + enum_type_suggestion(type_, node)), node)) + return + + # Scalars determine if a literal value is valid via parse_literal() + # which may throw or return an invalid value to indicate failure. + type_ = cast(GraphQLScalarType, type_) + try: + parse_result = type_.parse_literal(node) + if is_invalid(parse_result): + self.report_error(GraphQLError(bad_value_message( + location_type, print_ast(node)), node)) + except Exception as error: + # Ensure a reference to the original error is maintained. 
+ self.report_error(GraphQLError(bad_value_message( + location_type, print_ast(node), str(error)), + node, original_error=error)) + + +def enum_type_suggestion(type_: GraphQLType, node: ValueNode) -> Optional[str]: + if is_enum_type(type_): + type_ = cast(GraphQLEnumType, type_) + suggestions = suggestion_list( + print_ast(node), list(type_.values)) + if suggestions: + return f'Did you mean the enum value {or_list(suggestions)}?' + return None diff --git a/graphql/validation/rules/variables_are_input_types.py b/graphql/validation/rules/variables_are_input_types.py new file mode 100644 index 00000000..8b5aadce --- /dev/null +++ b/graphql/validation/rules/variables_are_input_types.py @@ -0,0 +1,30 @@ +from ...error import GraphQLError +from ...language import print_ast +from ...type import is_input_type +from ...utilities import type_from_ast +from . import ValidationRule + +__all__ = ['VariablesAreInputTypesRule', 'non_input_type_on_var_message'] + + +def non_input_type_on_var_message( + variable_name: str, type_name: str) -> str: + return (f"Variable '${variable_name}'" + f" cannot be non-input type '{type_name}'.") + + +class VariablesAreInputTypesRule(ValidationRule): + """Variables are input types + + A GraphQL operation is only valid if all the variables it defines are of + input types (scalar, enum, or input object). + """ + + def enter_variable_definition(self, node, *_args): + type_ = type_from_ast(self.context.schema, node.type) + + # If the variable type is not an input type, return an error. + if type_ and not is_input_type(type_): + variable_name = node.variable.name.value + self.report_error(GraphQLError(non_input_type_on_var_message( + variable_name, print_ast(node.type)), [node.type])) diff --git a/graphql/validation/rules/variables_in_allowed_position.py b/graphql/validation/rules/variables_in_allowed_position.py new file mode 100644 index 00000000..4e142bbb --- /dev/null +++ b/graphql/validation/rules/variables_in_allowed_position.py @@ -0,0 +1,80 @@ +from typing import Any, Optional, cast + +from ...error import GraphQLError, INVALID +from ...language import ValueNode, NullValueNode +from ...type import ( + GraphQLNonNull, GraphQLSchema, GraphQLType, is_non_null_type) +from ...utilities import type_from_ast, is_type_sub_type_of +from . import ValidationRule + +__all__ = ['VariablesInAllowedPositionRule', 'bad_var_pos_message'] + + +def bad_var_pos_message( + var_name: str, var_type: str, expected_type: str) -> str: + return (f"Variable '${var_name}' of type '{var_type}' used" + f" in position expecting type '{expected_type}'.") + + +class VariablesInAllowedPositionRule(ValidationRule): + """Variables passed to field arguments conform to type""" + + def __init__(self, context): + super().__init__(context) + self.var_def_map = {} + + def enter_operation_definition(self, *_args): + self.var_def_map.clear() + + def leave_operation_definition(self, operation, *_args): + var_def_map = self.var_def_map + usages = self.context.get_recursive_variable_usages(operation) + + for usage in usages: + node, type_ = usage.node, usage.type + default_value = usage.default_value + var_name = node.name.value + var_def = var_def_map.get(var_name) + if var_def and type_: + # A var type is allowed if it is the same or more strict + # (e.g. is a subtype of) than the expected type. + # It can be more strict if the variable type is non-null + # when the expected type is nullable. + # If both are list types, the variable item type can be + # more strict than the expected item type (contravariant). 
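+                # For example, '$id: ID!' may be used where type 'ID' is
+                # expected, but '$id: ID' may not be used where 'ID!' is
+                # expected, unless the variable or the location provides a
+                # suitable default value.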
+ schema = self.context.schema + var_type = type_from_ast(schema, var_def.type) + if var_type and not allowed_variable_usage( + schema, var_type, var_def.default_value, + type_, default_value): + self.report_error(GraphQLError( + bad_var_pos_message( + var_name, str(var_type), str(type_)), + [var_def, node])) + + def enter_variable_definition(self, node, *_args): + self.var_def_map[node.variable.name.value] = node + + +def allowed_variable_usage( + schema: GraphQLSchema, var_type: GraphQLType, + var_default_value: Optional[ValueNode], + location_type: GraphQLType, location_default_value: Any) -> bool: + """Check for allowed variable usage. + + Returns True if the variable is allowed in the location it was found, + which includes considering if default values exist for either the variable + or the location at which it is located. + """ + if is_non_null_type(location_type) and not is_non_null_type(var_type): + has_non_null_variable_default_value = ( + var_default_value and not isinstance( + var_default_value, NullValueNode)) + has_location_default_value = location_default_value is not INVALID + if (not has_non_null_variable_default_value + and not has_location_default_value): + return False + location_type = cast(GraphQLNonNull, location_type) + nullable_location_type = location_type.of_type + return is_type_sub_type_of(schema, var_type, nullable_location_type) + return is_type_sub_type_of(schema, var_type, location_type) diff --git a/graphql/validation/specified_rules.py b/graphql/validation/specified_rules.py new file mode 100644 index 00000000..539b511d --- /dev/null +++ b/graphql/validation/specified_rules.py @@ -0,0 +1,119 @@ +from typing import List, Type + +from .rules import ValidationRule + +# Spec Section: "Executable Definitions" +from .rules.executable_definitions import ExecutableDefinitionsRule + +# Spec Section: "Operation Name Uniqueness" +from .rules.unique_operation_names import UniqueOperationNamesRule + +# Spec Section: "Lone Anonymous Operation" +from .rules.lone_anonymous_operation import LoneAnonymousOperationRule + +# Spec Section: "Subscriptions with Single Root Field" +from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule + +# Spec Section: "Fragment Spread Type Existence" +from .rules.known_type_names import KnownTypeNamesRule + +# Spec Section: "Fragments on Composite Types" +from .rules.fragments_on_composite_types import FragmentsOnCompositeTypesRule + +# Spec Section: "Variables are Input Types" +from .rules.variables_are_input_types import VariablesAreInputTypesRule + +# Spec Section: "Leaf Field Selections" +from .rules.scalar_leafs import ScalarLeafsRule + +# Spec Section: "Field Selections on Objects, Interfaces, and Unions Types" +from .rules.fields_on_correct_type import FieldsOnCorrectTypeRule + +# Spec Section: "Fragment Name Uniqueness" +from .rules.unique_fragment_names import UniqueFragmentNamesRule + +# Spec Section: "Fragment spread target defined" +from .rules.known_fragment_names import KnownFragmentNamesRule + +# Spec Section: "Fragments must be used" +from .rules.no_unused_fragments import NoUnusedFragmentsRule + +# Spec Section: "Fragment spread is possible" +from .rules.possible_fragment_spreads import PossibleFragmentSpreadsRule + +# Spec Section: "Fragments must not form cycles" +from .rules.no_fragment_cycles import NoFragmentCyclesRule + +# Spec Section: "Variable Uniqueness" +from .rules.unique_variable_names import UniqueVariableNamesRule + +# Spec Section: "All Variable Used Defined" +from 
.rules.no_undefined_variables import NoUndefinedVariablesRule + +# Spec Section: "All Variables Used" +from .rules.no_unused_variables import NoUnusedVariablesRule + +# Spec Section: "Directives Are Defined" +from .rules.known_directives import KnownDirectivesRule + +# Spec Section: "Directives Are Unique Per Location" +from .rules.unique_directives_per_location import ( + UniqueDirectivesPerLocationRule) + +# Spec Section: "Argument Names" +from .rules.known_argument_names import KnownArgumentNamesRule + +# Spec Section: "Argument Uniqueness" +from .rules.unique_argument_names import UniqueArgumentNamesRule + +# Spec Section: "Value Type Correctness" +from .rules.values_of_correct_type import ValuesOfCorrectTypeRule + +# Spec Section: "Argument Optionality" +from .rules.provided_required_arguments import ProvidedRequiredArgumentsRule + +# Spec Section: "All Variable Usages Are Allowed" +from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule + +# Spec Section: "Field Selection Merging" +from .rules.overlapping_fields_can_be_merged import ( + OverlappingFieldsCanBeMergedRule) + +# Spec Section: "Input Object Field Uniqueness" +from .rules.unique_input_field_names import UniqueInputFieldNamesRule + +__all__ = ['specified_rules'] + + +# This list includes all validation rules defined by the GraphQL spec. +# +# The order of the rules in this list has been adjusted to lead to the +# most clear output when encountering multiple validation errors. + +specified_rules: List[Type[ValidationRule]] = [ + ExecutableDefinitionsRule, + UniqueOperationNamesRule, + LoneAnonymousOperationRule, + SingleFieldSubscriptionsRule, + KnownTypeNamesRule, + FragmentsOnCompositeTypesRule, + VariablesAreInputTypesRule, + ScalarLeafsRule, + FieldsOnCorrectTypeRule, + UniqueFragmentNamesRule, + KnownFragmentNamesRule, + NoUnusedFragmentsRule, + PossibleFragmentSpreadsRule, + NoFragmentCyclesRule, + UniqueVariableNamesRule, + NoUndefinedVariablesRule, + NoUnusedVariablesRule, + KnownDirectivesRule, + UniqueDirectivesPerLocationRule, + KnownArgumentNamesRule, + UniqueArgumentNamesRule, + ValuesOfCorrectTypeRule, + ProvidedRequiredArgumentsRule, + VariablesInAllowedPositionRule, + OverlappingFieldsCanBeMergedRule, + UniqueInputFieldNamesRule] diff --git a/graphql/validation/validate.py b/graphql/validation/validate.py new file mode 100644 index 00000000..f59c221c --- /dev/null +++ b/graphql/validation/validate.py @@ -0,0 +1,53 @@ +from typing import List, Sequence, Type + +from ..error import GraphQLError +from ..language import DocumentNode, ParallelVisitor, TypeInfoVisitor, visit +from ..type import GraphQLSchema, assert_valid_schema +from ..utilities import TypeInfo +from .rules import ValidationRule +from .specified_rules import specified_rules +from .validation_context import ValidationContext + +__all__ = ['validate'] + +RuleType = Type[ValidationRule] + + +def validate(schema: GraphQLSchema, document_ast: DocumentNode, + rules: Sequence[RuleType]=None, + type_info: TypeInfo=None) -> List[GraphQLError]: + """Implements the "Validation" section of the spec. + + Validation runs synchronously, returning a list of encountered errors, or + an empty list if no errors were encountered and the document is valid. + + A list of specific validation rules may be provided. If not provided, the + default list of rules defined by the GraphQL specification will be used. 
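+
+    A minimal usage sketch (the schema and query are only illustrative):
+
+        from graphql import build_schema, parse, validate
+
+        schema = build_schema('type Query { hello: String }')
+        errors = validate(schema, parse('{ hello }'))
+        assert not errors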
+ + Each validation rule is a ValidationRule object which is a visitor object + that holds a ValidationContext (see the language/visitor API). + Visitor methods are expected to return GraphQLErrors, or lists of + GraphQLErrors when invalid. + + Optionally a custom TypeInfo instance may be provided. If not provided, one + will be created from the provided schema. + """ + if not document_ast or not isinstance(document_ast, DocumentNode): + raise TypeError('You must provide a document node.') + # If the schema used for validation is invalid, throw an error. + assert_valid_schema(schema) + if type_info is None: + type_info = TypeInfo(schema) + elif not isinstance(type_info, TypeInfo): + raise TypeError(f'Not a TypeInfo object: {type_info!r}') + if rules is None: + rules = specified_rules + elif not isinstance(rules, (list, tuple)): + raise TypeError('Rules must be passed as a list/tuple.') + context = ValidationContext(schema, document_ast, type_info) + # This uses a specialized visitor which runs multiple visitors in parallel, + # while maintaining the visitor skip and break API. + visitors = [rule(context) for rule in rules] + # Visit the whole document with each instance of all provided rules. + visit(document_ast, TypeInfoVisitor(type_info, ParallelVisitor(visitors))) + return context.errors diff --git a/graphql/validation/validation_context.py b/graphql/validation/validation_context.py new file mode 100644 index 00000000..64bbd289 --- /dev/null +++ b/graphql/validation/validation_context.py @@ -0,0 +1,174 @@ +from typing import Any, Dict, List, NamedTuple, Optional, Set, Union, cast + +from ..error import GraphQLError +from ..language import ( + DocumentNode, FragmentDefinitionNode, FragmentSpreadNode, + OperationDefinitionNode, SelectionSetNode, TypeInfoVisitor, + VariableNode, Visitor, visit) +from ..type import GraphQLSchema, GraphQLInputType +from ..utilities import TypeInfo + +__all__ = ['ValidationContext', 'VariableUsage', 'VariableUsageVisitor'] + + +NodeWithSelectionSet = Union[OperationDefinitionNode, FragmentDefinitionNode] + + +class VariableUsage(NamedTuple): + node: VariableNode + type: Optional[GraphQLInputType] + default_value: Any + + +class VariableUsageVisitor(Visitor): + """Visitor adding all variable usages to a given list.""" + + usages: List[VariableUsage] + + def __init__(self, type_info: TypeInfo) -> None: + self.usages = [] + self._append_usage = self.usages.append + self._type_info = type_info + + def enter_variable_definition(self, *_args): + return self.SKIP + + def enter_variable(self, node, *_args): + type_info = self._type_info + usage = VariableUsage( + node, type_info.get_input_type(), type_info.get_default_value()) + self._append_usage(usage) + + +class ValidationContext: + """Utility class providing a context for validation. + + An instance of this class is passed as the context attribute to all + Validators, allowing access to commonly useful contextual information + from within a validation rule. 
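+
+    For example, a rule can call 'self.context.get_type()' to obtain the
+    GraphQL type at the current position in the document, or
+    'self.context.report_error(error)' to record a validation error.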
+ """ + + schema: GraphQLSchema + ast: DocumentNode + errors: List[GraphQLError] + + def __init__(self, schema: GraphQLSchema, + ast: DocumentNode, type_info: TypeInfo) -> None: + self.schema = schema + self.ast = ast + self._type_info = type_info + self.errors = [] + self._fragments: Optional[Dict[str, FragmentDefinitionNode]] = None + self._fragment_spreads: Dict[ + SelectionSetNode, List[FragmentSpreadNode]] = {} + self._recursively_referenced_fragments: Dict[ + OperationDefinitionNode, List[FragmentDefinitionNode]] = {} + self._variable_usages: Dict[ + NodeWithSelectionSet, List[VariableUsage]] = {} + self._recursive_variable_usages: Dict[ + OperationDefinitionNode, List[VariableUsage]] = {} + + def report_error(self, error: GraphQLError): + self.errors.append(error) + + def get_fragment(self, name) -> Optional[FragmentDefinitionNode]: + fragments = self._fragments + if fragments is None: + fragments = {} + for statement in self.ast.definitions: + if isinstance(statement, FragmentDefinitionNode): + fragments[statement.name.value] = statement + self._fragments = fragments + return fragments.get(name) + + def get_fragment_spreads( + self, node: SelectionSetNode) -> List[FragmentSpreadNode]: + spreads = self._fragment_spreads.get(node) + if spreads is None: + spreads = [] + append_spread = spreads.append + sets_to_visit = [node] + append_set = sets_to_visit.append + pop_set = sets_to_visit.pop + while sets_to_visit: + visited_set = pop_set() + for selection in visited_set.selections: + if isinstance(selection, FragmentSpreadNode): + append_spread(selection) + else: + set_to_visit = cast( + NodeWithSelectionSet, selection).selection_set + if set_to_visit: + append_set(set_to_visit) + self._fragment_spreads[node] = spreads + return spreads + + def get_recursively_referenced_fragments( + self, operation: OperationDefinitionNode + ) -> List[FragmentDefinitionNode]: + fragments = self._recursively_referenced_fragments.get(operation) + if fragments is None: + fragments = [] + append_fragment = fragments.append + collected_names: Set[str] = set() + add_name = collected_names.add + nodes_to_visit = [operation.selection_set] + append_node = nodes_to_visit.append + pop_node = nodes_to_visit.pop + get_fragment = self.get_fragment + get_fragment_spreads = self.get_fragment_spreads + while nodes_to_visit: + visited_node = pop_node() + for spread in get_fragment_spreads(visited_node): + frag_name = spread.name.value + if frag_name not in collected_names: + add_name(frag_name) + fragment = get_fragment(frag_name) + if fragment: + append_fragment(fragment) + append_node(fragment.selection_set) + self._recursively_referenced_fragments[operation] = fragments + return fragments + + def get_variable_usages( + self, node: NodeWithSelectionSet) -> List[VariableUsage]: + usages = self._variable_usages.get(node) + if usages is None: + usage_visitor = VariableUsageVisitor(self._type_info) + visit(node, TypeInfoVisitor(self._type_info, usage_visitor)) + usages = usage_visitor.usages + self._variable_usages[node] = usages + return usages + + def get_recursive_variable_usages( + self, operation: OperationDefinitionNode) -> List[VariableUsage]: + usages = self._recursive_variable_usages.get(operation) + if usages is None: + get_variable_usages = self.get_variable_usages + usages = get_variable_usages(operation) + fragments = self.get_recursively_referenced_fragments(operation) + for fragment in fragments: + usages.extend(get_variable_usages(fragment)) + self._recursive_variable_usages[operation] = usages + return 
usages + + def get_type(self): + return self._type_info.get_type() + + def get_parent_type(self): + return self._type_info.get_parent_type() + + def get_input_type(self): + return self._type_info.get_input_type() + + def get_parent_input_type(self): + return self._type_info.get_parent_input_type() + + def get_field_def(self): + return self._type_info.get_field_def() + + def get_directive(self): + return self._type_info.get_directive() + + def get_argument(self): + return self._type_info.get_argument() diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..315809fb --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +python_files = test_*.py harness.py diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..5a1a94e0 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,20 @@ +[bumpversion] +current_version = 1.0.0rc1 +commit = True +tag = True + +[bumpversion:file:setup.py] +search = version='{current_version}' +replace = version='{new_version}' + +[bumpversion:file:graphql/__init__.py] +search = __version__ = '{current_version}' +replace = __version__ = '{new_version}' + +[bdist_wheel] +python-tag = py3 + +[aliases] +# Define setup.py command aliases here +test = pytest + diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..af3a4605 --- /dev/null +++ b/setup.py @@ -0,0 +1,42 @@ +from re import search +from setuptools import setup, find_packages + +with open('graphql/__init__.py') as init_file: + version = search("__version__ = '(.*)'", init_file.read()).group(1) + +with open('README.md') as readme_file: + readme = readme_file.read() + +setup( + name='GraphQL-core-next', + version=version, + + description='GraphQL-core-next is a Python port of GraphQL.js,' + ' the JavaScript reference implementation for GraphQL.', + long_description=readme, + long_description_content_type='text/markdown', + keywords='graphql', + + url='https://github.com/graphql-python/graphql-core-next', + + author='Christoph Zwerschke', + author_email='cito@online.de', + license='MIT license', + + classifiers=[ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7'], + + install_requires=[], + python_requires='>=3.6', + test_suite='tests', + tests_require=[ + 'pytest', 'pytest-asyncio', 'pytest-cov', 'pytest-describe', + 'flake8', 'mypy', 'tox', 'python-coveralls'], + packages=find_packages(include=['graphql']), + include_package_data=True, + zip_safe=False) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..db4fe368 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for graphql""" diff --git a/tests/error/__init__.py b/tests/error/__init__.py new file mode 100644 index 00000000..c93fa184 --- /dev/null +++ b/tests/error/__init__.py @@ -0,0 +1 @@ +"""Tests for graphql.error""" diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py new file mode 100644 index 00000000..602646b3 --- /dev/null +++ b/tests/error/test_graphql_error.py @@ -0,0 +1,102 @@ +from graphql.error import GraphQLError, format_error +from graphql.language import parse, Source + + +def describe_graphql_error(): + + def is_a_class_and_is_a_subclass_of_exception(): + assert issubclass(GraphQLError, Exception) + assert isinstance(GraphQLError('msg'), GraphQLError) + + def has_a_name_message_and_stack_trace(): + e = GraphQLError('msg') + assert 
e.__class__.__name__ == 'GraphQLError' + assert e.message == 'msg' + + def stores_the_original_error(): + original = Exception('original') + e = GraphQLError('msg', original_error=original) + assert e.__class__.__name__ == 'GraphQLError' + assert e.message == 'msg' + assert e.original_error == original + + def converts_nodes_to_positions_and_locations(): + source = Source('{\n field\n}') + ast = parse(source) + # noinspection PyUnresolvedReferences + field_node = ast.definitions[0].selection_set.selections[0] + e = GraphQLError('msg', [field_node]) + assert e.nodes == [field_node] + assert e.source is source + assert e.positions == [8] + assert e.locations == [(2, 7)] + + def converts_single_node_to_positions_and_locations(): + source = Source('{\n field\n}') + ast = parse(source) + # noinspection PyUnresolvedReferences + field_node = ast.definitions[0].selection_set.selections[0] + e = GraphQLError('msg', field_node) # Non-array value. + assert e.nodes == [field_node] + assert e.source is source + assert e.positions == [8] + assert e.locations == [(2, 7)] + + def converts_node_with_loc_start_zero_to_positions_and_locations(): + source = Source('{\n field\n}') + ast = parse(source) + operations_node = ast.definitions[0] + e = GraphQLError('msg', [operations_node]) + assert e.nodes == [operations_node] + assert e.source is source + assert e.positions == [0] + assert e.locations == [(1, 1)] + + def converts_source_and_positions_to_locations(): + source = Source('{\n field\n}') + # noinspection PyArgumentEqualDefault + e = GraphQLError('msg', None, source, [10]) + assert e.nodes is None + assert e.source is source + assert e.positions == [10] + assert e.locations == [(2, 9)] + + def serializes_to_include_message(): + e = GraphQLError('msg') + assert str(e) == 'msg' + assert repr(e) == "GraphQLError('msg')" + + def serializes_to_include_message_and_locations(): + # noinspection PyUnresolvedReferences + node = parse('{ field }').definitions[0].selection_set.selections[0] + e = GraphQLError('msg', [node]) + assert 'msg' in str(e) + assert '(1:3)' in str(e) + assert repr(e) == ("GraphQLError('msg'," + " locations=[SourceLocation(line=1, column=3)])") + + def serializes_to_include_path(): + path = ['path', 3, 'to', 'field'] + # noinspection PyArgumentEqualDefault + e = GraphQLError('msg', None, None, None, path) + assert e.path is path + assert repr(e) == ("GraphQLError('msg'," + " path=['path', 3, 'to', 'field'])") + + def default_error_formatter_includes_path(): + path = ['path', 3, 'to', 'field'] + # noinspection PyArgumentEqualDefault + e = GraphQLError('msg', None, None, None, path) + formatted = format_error(e) + assert formatted == e.formatted + assert formatted == { + 'message': 'msg', 'locations': None, 'path': path} + + def default_error_formatter_includes_extension_fields(): + # noinspection PyArgumentEqualDefault + e = GraphQLError('msg', None, None, None, None, None, {'foo': 'bar'}) + formatted = format_error(e) + assert formatted == e.formatted + assert formatted == { + 'message': 'msg', 'locations': None, 'path': None, + 'extensions': {'foo': 'bar'}} diff --git a/tests/error/test_located_error.py b/tests/error/test_located_error.py new file mode 100644 index 00000000..15bd0b75 --- /dev/null +++ b/tests/error/test_located_error.py @@ -0,0 +1,24 @@ +from graphql.error import GraphQLError, located_error + + +def describe_located_error(): + + def passes_graphql_error_through(): + path = ['path', 3, 'to', 'field'] + # noinspection PyArgumentEqualDefault + e = GraphQLError('msg', None, 
None, None, path) + assert located_error(e, [], []) == e + + def passes_graphql_error_ish_through(): + e = Exception('I am an ordinary exception') + e.locations = [] + e.path = [] + e.nodes = [] + e.source = None + e.positions = [] + assert located_error(e, [], []) == e + + def does_not_pass_through_elasticsearch_like_errors(): + e = Exception('I am from elasticsearch') + e.path = '/something/feed/_search' + assert located_error(e, [], []) != e diff --git a/tests/error/test_print_error.py b/tests/error/test_print_error.py new file mode 100644 index 00000000..a4c876a5 --- /dev/null +++ b/tests/error/test_print_error.py @@ -0,0 +1,70 @@ +from typing import cast + +from graphql.error import GraphQLError, print_error +from graphql.language import ( + parse, ObjectTypeDefinitionNode, Source, SourceLocation) +from graphql.pyutils import dedent + + +def describe_print_error(): + + # noinspection PyArgumentEqualDefault + def prints_line_numbers_with_correct_padding(): + single_digit = GraphQLError( + 'Single digit line number with no padding', None, + Source('*', 'Test', SourceLocation(9, 1)), [0]) + assert print_error(single_digit) == dedent(""" + Single digit line number with no padding + + Test (9:1) + 9: * + ^ + """) + + double_digit = GraphQLError( + 'Left padded first line number', None, + Source('*\n', 'Test', SourceLocation(9, 1)), [0]) + + assert print_error(double_digit) == dedent(""" + Left padded first line number + + Test (9:1) + 9: * + ^ + 10:\x20 + """) + + def prints_an_error_with_nodes_from_different_sources(): + source_a = parse(Source(dedent(""" + type Foo { + field: String + } + """), 'SourceA')) + field_type_a = cast( + ObjectTypeDefinitionNode, source_a.definitions[0]).fields[0].type + source_b = parse(Source(dedent(""" + type Foo { + field: Int + } + """), 'SourceB')) + field_type_b = cast( + ObjectTypeDefinitionNode, source_b.definitions[0]).fields[0].type + error = GraphQLError('Example error with two nodes', + [field_type_a, field_type_b]) + printed_error = print_error(error) + assert printed_error == dedent(""" + Example error with two nodes + + SourceA (2:10) + 1: type Foo { + 2: field: String + ^ + 3: } + + SourceB (2:10) + 1: type Foo { + 2: field: Int + ^ + 3: } + """) + assert str(error) == printed_error diff --git a/tests/execution/__init__.py b/tests/execution/__init__.py new file mode 100644 index 00000000..39aad3ed --- /dev/null +++ b/tests/execution/__init__.py @@ -0,0 +1 @@ +"""Tests for graphql.execution""" diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py new file mode 100644 index 00000000..8c6e2478 --- /dev/null +++ b/tests/execution/test_abstract.py @@ -0,0 +1,268 @@ +from collections import namedtuple + +from graphql import graphql_sync +from graphql.error import format_error +from graphql.type import ( + GraphQLBoolean, GraphQLField, GraphQLInterfaceType, + GraphQLList, GraphQLObjectType, GraphQLSchema, GraphQLString, + GraphQLUnionType) + +Dog = namedtuple('Dog', 'name woofs') +Cat = namedtuple('Cat', 'name meows') +Human = namedtuple('Human', 'name') + + +def get_is_type_of(type_): + def is_type_of(obj, _info): + return isinstance(obj, type_) + return is_type_of + + +def get_type_resolver(types): + def resolve(obj, _info): + return resolve_thunk(types).get(obj.__class__) + return resolve + + +def resolve_thunk(thunk): + return thunk() if callable(thunk) else thunk + + +def describe_execute_handles_synchronous_execution_of_abstract_types(): + + def is_type_of_used_to_resolve_runtime_type_for_interface(): + PetType = 
GraphQLInterfaceType('Pet', { + 'name': GraphQLField(GraphQLString)}) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType], + is_type_of=get_is_type_of(Dog)) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType], + is_type_of=get_is_type_of(Cat)) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_args: [ + Dog('Odie', True), Cat('Garfield', False)])}), + types=[CatType, DogType]) + + query = """ + { + pets { + name + ... on Dog { + woofs + } + ... on Cat { + meows + } + } + } + """ + + result = graphql_sync(schema, query) + assert result == ({'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}]}, None) + + def is_type_of_used_to_resolve_runtime_type_for_union(): + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + is_type_of=get_is_type_of(Dog)) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + is_type_of=get_is_type_of(Cat)) + + PetType = GraphQLUnionType('Pet', [CatType, DogType]) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_args: [ + Dog('Odie', True), Cat('Garfield', False)])})) + + query = """ + { + pets { + ... on Dog { + name + woofs + } + ... on Cat { + name + meows + } + } + } + """ + + result = graphql_sync(schema, query) + assert result == ({'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}]}, None) + + def resolve_type_on_interface_yields_useful_error(): + PetType = GraphQLInterfaceType('Pet', { + 'name': GraphQLField(GraphQLString)}, + resolve_type=get_type_resolver(lambda: { + Dog: DogType, Cat: CatType, Human: HumanType})) + + HumanType = GraphQLObjectType('Human', { + 'name': GraphQLField(GraphQLString)}) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType]) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType]) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_args: [ + Dog('Odie', True), Cat('Garfield', False), Human('Jon')])}), + types=[CatType, DogType]) + + query = """ + { + pets { + name + ... on Dog { + woofs + } + ... 
on Cat { + meows + } + } + } + """ + + result = graphql_sync(schema, query) + assert result.data == {'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}, None]} + + assert len(result.errors) == 1 + assert format_error(result.errors[0]) == { + 'message': "Runtime Object type 'Human'" + " is not a possible type for 'Pet'.", + 'locations': [(3, 15)], 'path': ['pets', 2]} + + def resolve_type_on_union_yields_useful_error(): + HumanType = GraphQLObjectType('Human', { + 'name': GraphQLField(GraphQLString)}) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}) + + PetType = GraphQLUnionType('Pet', [ + DogType, CatType], + resolve_type=get_type_resolver({ + Dog: DogType, Cat: CatType, Human: HumanType})) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_: [ + Dog('Odie', True), Cat('Garfield', False), Human('Jon')])})) + + query = """ + { + pets { + ... on Dog { + name + woofs + } + ... on Cat { + name + meows + } + } + } + """ + + result = graphql_sync(schema, query) + assert result.data == {'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}, None]} + + assert len(result.errors) == 1 + assert format_error(result.errors[0]) == { + 'message': "Runtime Object type 'Human'" + " is not a possible type for 'Pet'.", + 'locations': [(3, 15)], 'path': ['pets', 2]} + + def returning_invalid_value_from_resolve_type_yields_useful_error(): + fooInterface = GraphQLInterfaceType('FooInterface', { + 'bar': GraphQLField(GraphQLString)}, + resolve_type=lambda *_args: []) + + fooObject = GraphQLObjectType('FooObject', { + 'bar': GraphQLField(GraphQLString)}, + interfaces=[fooInterface]) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'foo': GraphQLField( + fooInterface, resolve=lambda *_args: 'dummy')}), + types=[fooObject]) + + result = graphql_sync(schema, '{ foo { bar } }') + + assert result == ({'foo': None}, [{ + 'message': + 'Abstract type FooInterface must resolve to an Object type' + " at runtime for field Query.foo with value 'dummy'," + " received '[]'. Either the FooInterface type should provide" + ' a "resolve_type" function or each possible type' + ' should provide an "is_type_of" function.', + 'locations': [(1, 3)], 'path': ['foo']}]) + + def resolve_type_allows_resolving_with_type_name(): + PetType = GraphQLInterfaceType('Pet', { + 'name': GraphQLField(GraphQLString)}, + resolve_type=get_type_resolver({ + Dog: 'Dog', Cat: 'Cat'})) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType]) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType]) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_: [ + Dog('Odie', True), Cat('Garfield', False)])}), + types=[CatType, DogType]) + + query = """ + { + pets { + name + ... on Dog { + woofs + } + ... 
on Cat { + meows + } + } + }""" + + result = graphql_sync(schema, query) + assert result == ({'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}]}, None) diff --git a/tests/execution/test_abstract_async.py b/tests/execution/test_abstract_async.py new file mode 100644 index 00000000..0d59528d --- /dev/null +++ b/tests/execution/test_abstract_async.py @@ -0,0 +1,300 @@ +from collections import namedtuple + +from pytest import mark + +from graphql import graphql +from graphql.error import format_error +from graphql.type import ( + GraphQLBoolean, GraphQLField, GraphQLInterfaceType, + GraphQLList, GraphQLObjectType, GraphQLSchema, GraphQLString, + GraphQLUnionType) + +Dog = namedtuple('Dog', 'name woofs') +Cat = namedtuple('Cat', 'name meows') +Human = namedtuple('Human', 'name') + + +async def is_type_of_error(*_args): + raise RuntimeError('We are testing this error') + + +def get_is_type_of(type_): + async def is_type_of(obj, _info): + return isinstance(obj, type_) + return is_type_of + + +def get_type_resolver(types): + async def resolve(obj, _info): + return resolve_thunk(types).get(obj.__class__) + return resolve + + +def resolve_thunk(thunk): + return thunk() if callable(thunk) else thunk + + +def describe_execute_handles_asynchronous_execution_of_abstract_types(): + + @mark.asyncio + async def is_type_of_used_to_resolve_runtime_type_for_interface(): + PetType = GraphQLInterfaceType('Pet', { + 'name': GraphQLField(GraphQLString)}) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType], + is_type_of=get_is_type_of(Dog)) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType], + is_type_of=get_is_type_of(Cat)) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_args: [ + Dog('Odie', True), Cat('Garfield', False)])}), + types=[CatType, DogType]) + + query = """ + { + pets { + name + ... on Dog { + woofs + } + ... on Cat { + meows + } + } + } + """ + + result = await graphql(schema, query) + assert result == ({'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}]}, None) + + @mark.asyncio + async def is_type_of_with_async_error(): + PetType = GraphQLInterfaceType('Pet', { + 'name': GraphQLField(GraphQLString)}) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType], + is_type_of=is_type_of_error) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType], + is_type_of=get_is_type_of(Cat)) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_args: [ + Dog('Odie', True), Cat('Garfield', False)])}), + types=[CatType, DogType]) + + query = """ + { + pets { + name + ... on Dog { + woofs + } + ... 
on Cat { + meows + } + } + } + """ + + result = await graphql(schema, query) + # Note: we get two errors, because first all types are resolved + # and only then they are checked sequentially + assert result.data == {'pets': [None, None]} + assert list(map(format_error, result.errors)) == [{ + 'message': 'We are testing this error', + 'locations': [(3, 15)], 'path': ['pets', 0]}, { + 'message': 'We are testing this error', + 'locations': [(3, 15)], 'path': ['pets', 1]}] + + @mark.asyncio + async def is_type_of_used_to_resolve_runtime_type_for_union(): + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + is_type_of=get_is_type_of(Dog)) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + is_type_of=get_is_type_of(Cat)) + + PetType = GraphQLUnionType('Pet', [CatType, DogType]) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_args: [ + Dog('Odie', True), Cat('Garfield', False)])})) + + query = """ + { + pets { + ... on Dog { + name + woofs + } + ... on Cat { + name + meows + } + } + } + """ + + result = await graphql(schema, query) + assert result == ({'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}]}, None) + + @mark.asyncio + async def resolve_type_on_interface_yields_useful_error(): + PetType = GraphQLInterfaceType('Pet', { + 'name': GraphQLField(GraphQLString)}, + resolve_type=get_type_resolver(lambda: { + Dog: DogType, Cat: CatType, Human: HumanType})) + + HumanType = GraphQLObjectType('Human', { + 'name': GraphQLField(GraphQLString)}) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType]) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType]) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_args: [ + Dog('Odie', True), Cat('Garfield', False), Human('Jon')])}), + types=[CatType, DogType]) + + query = """ + { + pets { + name + ... on Dog { + woofs + } + ... on Cat { + meows + } + } + } + """ + + result = await graphql(schema, query) + assert result.data == {'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}, None]} + + assert len(result.errors) == 1 + assert format_error(result.errors[0]) == { + 'message': "Runtime Object type 'Human'" + " is not a possible type for 'Pet'.", + 'locations': [(3, 15)], 'path': ['pets', 2]} + + @mark.asyncio + async def resolve_type_on_union_yields_useful_error(): + HumanType = GraphQLObjectType('Human', { + 'name': GraphQLField(GraphQLString)}) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}) + + PetType = GraphQLUnionType('Pet', [ + DogType, CatType], + resolve_type=get_type_resolver({ + Dog: DogType, Cat: CatType, Human: HumanType})) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_: [ + Dog('Odie', True), Cat('Garfield', False), Human('Jon')])})) + + query = """ + { + pets { + ... on Dog { + name + woofs + } + ... 
on Cat { + name + meows + } + } + } + """ + + result = await graphql(schema, query) + assert result.data == {'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}, None]} + + assert len(result.errors) == 1 + assert format_error(result.errors[0]) == { + 'message': "Runtime Object type 'Human'" + " is not a possible type for 'Pet'.", + 'locations': [(3, 15)], 'path': ['pets', 2]} + + @mark.asyncio + async def resolve_type_allows_resolving_with_type_name(): + PetType = GraphQLInterfaceType('Pet', { + 'name': GraphQLField(GraphQLString)}, + resolve_type=get_type_resolver({ + Dog: 'Dog', Cat: 'Cat'})) + + DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'woofs': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType]) + + CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + interfaces=[PetType]) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'pets': GraphQLField(GraphQLList(PetType), resolve=lambda *_: [ + Dog('Odie', True), Cat('Garfield', False)])}), + types=[CatType, DogType]) + + query = """ + { + pets { + name + ... on Dog { + woofs + } + ... on Cat { + meows + } + } + }""" + + result = await graphql(schema, query) + assert result == ({'pets': [ + {'name': 'Odie', 'woofs': True}, + {'name': 'Garfield', 'meows': False}]}, None) diff --git a/tests/execution/test_directives.py b/tests/execution/test_directives.py new file mode 100644 index 00000000..759e6b2a --- /dev/null +++ b/tests/execution/test_directives.py @@ -0,0 +1,220 @@ +from graphql import GraphQLSchema +from graphql.execution import execute +from graphql.language import parse +from graphql.type import GraphQLObjectType, GraphQLField, GraphQLString + +schema = GraphQLSchema(GraphQLObjectType('TestType', { + 'a': GraphQLField(GraphQLString), + 'b': GraphQLField(GraphQLString)})) + + +# noinspection PyMethodMayBeStatic +class Data: + + def a(self, *_args): + return 'a' + + def b(self, *_args): + return 'b' + + +def execute_test_query(doc): + return execute(schema, parse(doc), Data) + + +def describe_execute_handles_directives(): + + def describe_works_without_directives(): + + def basic_query_works(): + result = execute_test_query('{ a, b }') + assert result == ({'a': 'a', 'b': 'b'}, None) + + def describe_works_on_scalars(): + + def if_true_includes_scalar(): + result = execute_test_query('{ a, b @include(if: true) }') + assert result == ({'a': 'a', 'b': 'b'}, None) + + def if_false_omits_on_scalar(): + result = execute_test_query('{ a, b @include(if: false) }') + assert result == ({'a': 'a'}, None) + + def unless_false_includes_scalar(): + result = execute_test_query('{ a, b @skip(if: false) }') + assert result == ({'a': 'a', 'b': 'b'}, None) + + def unless_true_omits_scalar(): + result = execute_test_query('{ a, b @skip(if: true) }') + assert result == ({'a': 'a'}, None) + + def describe_works_on_fragment_spreads(): + + def if_false_omits_fragment_spread(): + result = execute_test_query(""" + query Q { + a + ...Frag @include(if: false) + } + fragment Frag on TestType { + b + } + """) + assert result == ({'a': 'a'}, None) + + def if_true_includes_fragment_spread(): + result = execute_test_query(""" + query Q { + a + ...Frag @include(if: true) + } + fragment Frag on TestType { + b + } + """) + assert result == ({'a': 'a', 'b': 'b'}, None) + + def unless_false_includes_fragment_spread(): + result = execute_test_query(""" + query Q { + a + ...Frag @skip(if: false) + } + fragment Frag on TestType { + b 
+ } + """) + assert result == ({'a': 'a', 'b': 'b'}, None) + + def unless_true_omits_fragment_spread(): + result = execute_test_query(""" + query Q { + a + ...Frag @skip(if: true) + } + fragment Frag on TestType { + b + } + """) + assert result == ({'a': 'a'}, None) + + def describe_works_on_inline_fragment(): + + def if_false_omits_inline_fragment(): + result = execute_test_query(""" + query Q { + a + ... on TestType @include(if: false) { + b + } + } + """) + assert result == ({'a': 'a'}, None) + + def if_true_includes_inline_fragment(): + result = execute_test_query(""" + query Q { + a + ... on TestType @include(if: true) { + b + } + } + """) + assert result == ({'a': 'a', 'b': 'b'}, None) + + def unless_false_includes_inline_fragment(): + result = execute_test_query(""" + query Q { + a + ... on TestType @skip(if: false) { + b + } + } + """) + assert result == ({'a': 'a', 'b': 'b'}, None) + + def unless_true_omits_inline_fragment(): + result = execute_test_query(""" + query Q { + a + ... on TestType @skip(if: true) { + b + } + } + """) + assert result == ({'a': 'a'}, None) + + def describe_works_on_anonymous_inline_fragment(): + + def if_false_omits_anonymous_inline_fragment(): + result = execute_test_query(""" + query { + a + ... @include(if: false) { + b + } + } + """) + assert result == ({'a': 'a'}, None) + + def if_true_includes_anonymous_inline_fragment(): + result = execute_test_query(""" + query { + a + ... @include(if: true) { + b + } + } + """) + assert result == ({'a': 'a', 'b': 'b'}, None) + + def unless_false_includes_anonymous_inline_fragment(): + result = execute_test_query(""" + query { + a + ... @skip(if: false) { + b + } + } + """) + assert result == ({'a': 'a', 'b': 'b'}, None) + + def unless_true_omits_anonymous_inline_fragment(): + result = execute_test_query(""" + query { + a + ... 
@skip(if: true) { + b + } + } + """) + assert result == ({'a': 'a'}, None) + + def describe_works_with_skip_and_include_directives(): + + def include_and_no_skip(): + result = execute_test_query(""" + { + a + b @include(if: true) @skip(if: false) + } + """) + assert result == ({'a': 'a', 'b': 'b'}, None) + + def include_and_skip(): + result = execute_test_query(""" + { + a + b @include(if: true) @skip(if: true) + } + """) + assert result == ({'a': 'a'}, None) + + def no_include_or_skip(): + result = execute_test_query(""" + { + a + b @include(if: false) @skip(if: false) + } + """) + assert result == ({'a': 'a'}, None) diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py new file mode 100644 index 00000000..3e183b4d --- /dev/null +++ b/tests/execution/test_executor.py @@ -0,0 +1,675 @@ +import asyncio +from json import dumps +from typing import cast + +from pytest import raises, mark + +from graphql.error import GraphQLError +from graphql.execution import execute +from graphql.language import parse, OperationDefinitionNode, FieldNode +from graphql.type import ( + GraphQLSchema, GraphQLObjectType, GraphQLString, + GraphQLField, GraphQLArgument, GraphQLInt, GraphQLList, GraphQLNonNull, + GraphQLBoolean, GraphQLResolveInfo, ResponsePath) + + +def describe_execute_handles_basic_execution_tasks(): + + # noinspection PyTypeChecker + def throws_if_no_document_is_provided(): + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString)})) + + with raises(TypeError) as exc_info: + assert execute(schema, None) + + assert str(exc_info.value) == 'Must provide document' + + # noinspection PyTypeChecker + def throws_if_no_schema_is_provided(): + with raises(TypeError) as exc_info: + assert execute(schema=None, document=parse('{ field }')) + + assert str(exc_info.value) == 'Expected None to be a GraphQL schema.' 
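+    # The next test demonstrates that the root value passed to execute()
+    # is forwarded to top-level resolvers as their first (source) argument.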
+ + def accepts_an_object_with_named_properties_as_arguments(): + doc = 'query Example { a }' + + data = 'rootValue' + + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString, + resolve=lambda root_value, *args: root_value)})) + + assert execute(schema, document=parse(doc), root_value=data) == ( + {'a': 'rootValue'}, None) + + @mark.asyncio + async def executes_arbitrary_code(): + + # noinspection PyMethodMayBeStatic,PyMethodMayBeStatic + class Data: + + def a(self, _info): + return 'Apple' + + def b(self, _info): + return 'Banana' + + def c(self, _info): + return 'Cookie' + + def d(self, _info): + return 'Donut' + + def e(self, _info): + return 'Egg' + + f = 'Fish' + + def pic(self, _info, size=50): + return f'Pic of size: {size}' + + def deep(self, _info): + return DeepData() + + def promise(self, _info): + return promise_data() + + # noinspection PyMethodMayBeStatic,PyMethodMayBeStatic + class DeepData: + + def a(self, _info): + return 'Already Been Done' + + def b(self, _info): + return 'Boring' + + def c(self, _info): + return ['Contrived', None, 'Confusing'] + + def deeper(self, _info): + return [Data(), None, Data()] + + async def promise_data(): + await asyncio.sleep(0) + return Data() + + doc = """ + query Example($size: Int) { + a, + b, + x: c + ...c + f + ...on DataType { + pic(size: $size) + promise { + a + } + } + deep { + a + b + c + deeper { + a + b + } + } + } + + fragment c on DataType { + d + e + } + """ + + ast = parse(doc) + expected = ({ + 'a': 'Apple', + 'b': 'Banana', + 'x': 'Cookie', + 'd': 'Donut', + 'e': 'Egg', + 'f': 'Fish', + 'pic': 'Pic of size: 100', + 'promise': {'a': 'Apple'}, + 'deep': { + 'a': 'Already Been Done', + 'b': 'Boring', + 'c': ['Contrived', None, 'Confusing'], + 'deeper': [ + {'a': 'Apple', 'b': 'Banana'}, + None, + {'a': 'Apple', 'b': 'Banana'} + ]}}, None) + + DataType = GraphQLObjectType('DataType', lambda: { + 'a': GraphQLField(GraphQLString), + 'b': GraphQLField(GraphQLString), + 'c': GraphQLField(GraphQLString), + 'd': GraphQLField(GraphQLString), + 'e': GraphQLField(GraphQLString), + 'f': GraphQLField(GraphQLString), + 'pic': GraphQLField(GraphQLString, args={ + 'size': GraphQLArgument(GraphQLInt)}, + resolve=lambda obj, info, size: obj.pic(info, size)), + 'deep': GraphQLField(DeepDataType), + 'promise': GraphQLField(DataType)}) + + DeepDataType = GraphQLObjectType('DeepDataType', { + 'a': GraphQLField(GraphQLString), + 'b': GraphQLField(GraphQLString), + 'c': GraphQLField(GraphQLList(GraphQLString)), + 'deeper': GraphQLList(DataType)}) + + schema = GraphQLSchema(DataType) + + assert await execute( + schema, ast, Data(), variable_values={'size': 100}, + operation_name='Example') == expected + + def merges_parallel_fragments(): + ast = parse(""" + { a, ...FragOne, ...FragTwo } + + fragment FragOne on Type { + b + deep { b, deeper: deep { b } } + } + + fragment FragTwo on Type { + c + deep { c, deeper: deep { c } } + } + """) + + Type = GraphQLObjectType('Type', lambda: { + 'a': GraphQLField(GraphQLString, resolve=lambda *_args: 'Apple'), + 'b': GraphQLField(GraphQLString, resolve=lambda *_args: 'Banana'), + 'c': GraphQLField(GraphQLString, resolve=lambda *_args: 'Cherry'), + 'deep': GraphQLField(Type, resolve=lambda *_args: {})}) + schema = GraphQLSchema(Type) + + assert execute(schema, ast) == ({ + 'a': 'Apple', 'b': 'Banana', 'c': 'Cherry', 'deep': { + 'b': 'Banana', 'c': 'Cherry', 'deeper': { + 'b': 'Banana', 'c': 'Cherry'}}}, None) + + def provides_info_about_current_execution_state(): + ast = 
parse('query ($var: String) { result: test }') + + infos = [] + + def resolve(_obj, info): + infos.append(info) + + schema = GraphQLSchema(GraphQLObjectType('Test', { + 'test': GraphQLField(GraphQLString, resolve=resolve)})) + + root_value = {'root': 'val'} + + execute(schema, ast, root_value, variable_values={'var': 'abc'}) + + assert len(infos) == 1 + operation = cast(OperationDefinitionNode, ast.definitions[0]) + field = cast(FieldNode, operation.selection_set.selections[0]) + assert infos[0] == GraphQLResolveInfo( + field_name='test', field_nodes=[field], + return_type=GraphQLString, parent_type=schema.query_type, + path=ResponsePath(None, 'result'), schema=schema, + fragments={}, root_value=root_value, operation=operation, + variable_values={'var': 'abc'}, context=None) + + def threads_root_value_context_correctly(): + doc = 'query Example { a }' + + class Data: + context_thing = 'thing' + + resolved_values = [] + + def resolve(obj, _info): + resolved_values.append(obj) + + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString, resolve=resolve)})) + + execute(schema, parse(doc), Data()) + + assert len(resolved_values) == 1 + assert resolved_values[0].context_thing == 'thing' + + def correctly_threads_arguments(): + doc = """ + query Example { + b(numArg: 123, stringArg: "foo") + } + """ + + resolved_args = [] + + def resolve(_obj, _info, **args): + resolved_args.append(args) + + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'b': GraphQLField(GraphQLString, args={ + 'numArg': GraphQLArgument(GraphQLInt), + 'stringArg': GraphQLArgument(GraphQLString)}, + resolve=resolve)})) + + execute(schema, parse(doc)) + + assert len(resolved_args) == 1 + assert resolved_args[0] == {'numArg': 123, 'stringArg': 'foo'} + + @mark.asyncio + async def nulls_out_error_subtrees(): + doc = """{ + syncOk + syncError + syncRawError + syncReturnError + syncReturnErrorList + asyncOk + asyncError + asyncRawError + asyncReturnError + asyncReturnErrorWithExtensions + }""" + + # noinspection PyPep8Naming,PyMethodMayBeStatic + class Data: + + def syncOk(self, _info): + return 'sync ok' + + def syncError(self, _info): + raise GraphQLError('Error getting syncError') + + def syncRawError(self, _info): + raise Exception('Error getting syncRawError') + + def syncReturnError(self, _info): + return Exception('Error getting syncReturnError') + + def syncReturnErrorList(self, _info): + return [ + 'sync0', + Exception('Error getting syncReturnErrorList1'), + 'sync2', + Exception('Error getting syncReturnErrorList3')] + + async def asyncOk(self, _info): + return 'async ok' + + async def asyncError(self, _info): + raise Exception('Error getting asyncError') + + async def asyncRawError(self, _info): + raise Exception('Error getting asyncRawError') + + async def asyncReturnError(self, _info): + return GraphQLError('Error getting asyncReturnError') + + async def asyncReturnErrorWithExtensions(self, _info): + return GraphQLError( + 'Error getting asyncReturnErrorWithExtensions', + extensions={'foo': 'bar'}) + + ast = parse(doc) + + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'syncOk': GraphQLField(GraphQLString), + 'syncError': GraphQLField(GraphQLString), + 'syncRawError': GraphQLField(GraphQLString), + 'syncReturnError': GraphQLField(GraphQLString), + 'syncReturnErrorList': GraphQLField(GraphQLList(GraphQLString)), + 'asyncOk': GraphQLField(GraphQLString), + 'asyncError': GraphQLField(GraphQLString), + 'asyncErrorWithExtensions': GraphQLField(GraphQLString), + 'asyncRawError': 
GraphQLField(GraphQLString), + 'asyncReturnError': GraphQLField(GraphQLString), + 'asyncReturnErrorWithExtensions': GraphQLField(GraphQLString)})) + + assert await execute(schema, ast, Data()) == ({ + 'syncOk': 'sync ok', + 'syncError': None, + 'syncRawError': None, + 'syncReturnError': None, + 'syncReturnErrorList': ['sync0', None, 'sync2', None], + 'asyncOk': 'async ok', + 'asyncError': None, + 'asyncRawError': None, + 'asyncReturnError': None, + 'asyncReturnErrorWithExtensions': None + }, [{ + 'message': 'Error getting syncError', + 'locations': [(3, 15)], 'path': ['syncError']}, { + 'message': 'Error getting syncRawError', + 'locations': [(4, 15)], 'path': ['syncRawError']}, { + 'message': 'Error getting syncReturnError', + 'locations': [(5, 15)], 'path': ['syncReturnError']}, { + 'message': 'Error getting syncReturnErrorList1', + 'locations': [(6, 15)], 'path': ['syncReturnErrorList', 1]}, { + 'message': 'Error getting syncReturnErrorList3', + 'locations': [(6, 15)], 'path': ['syncReturnErrorList', 3]}, { + 'message': 'Error getting asyncError', + 'locations': [(8, 15)], 'path': ['asyncError']}, { + 'message': 'Error getting asyncRawError', + 'locations': [(9, 15)], 'path': ['asyncRawError']}, { + 'message': 'Error getting asyncReturnError', + 'locations': [(10, 15)], 'path': ['asyncReturnError']}, { + 'message': 'Error getting asyncReturnErrorWithExtensions', + 'locations': [(11, 15)], + 'path': ['asyncReturnErrorWithExtensions'], + 'extensions': {'foo': 'bar'}}]) + + def full_response_path_is_included_for_non_nullable_fields(): + + def resolve_ok(*_args): + return {} + + def resolve_error(*_args): + raise Exception('Catch me if you can') + + A = GraphQLObjectType('A', lambda: { + 'nullableA': GraphQLField(A, resolve=resolve_ok), + 'nonNullA': GraphQLField(GraphQLNonNull(A), resolve=resolve_ok), + 'throws': GraphQLField(GraphQLNonNull(A), resolve=resolve_error)}) + + query_type = GraphQLObjectType('query', lambda: { + 'nullableA': GraphQLField(A, resolve=resolve_ok)}) + schema = GraphQLSchema(query_type) + + query = """ + query { + nullableA { + aliasedA: nullableA { + nonNullA { + anotherA: nonNullA { + throws + } + } + } + } + } + """ + + assert execute(schema, parse(query)) == ({ + 'nullableA': {'aliasedA': None} + }, [{ + 'message': 'Catch me if you can', + 'locations': [(7, 23)], 'path': [ + 'nullableA', 'aliasedA', 'nonNullA', 'anotherA', 'throws'] + }]) + + def uses_the_inline_operation_if_no_operation_name_is_provided(): + doc = '{ a }' + + class Data: + a = 'b' + + ast = parse(doc) + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, Data()) == ({'a': 'b'}, None) + + def uses_the_only_operation_if_no_operation_name_is_provided(): + doc = 'query Example { a }' + + class Data: + a = 'b' + + ast = parse(doc) + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, Data()) == ({'a': 'b'}, None) + + def uses_the_named_operation_if_operation_name_is_provided(): + doc = 'query Example { first: a } query OtherExample { second: a }' + + class Data: + a = 'b' + + ast = parse(doc) + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, Data(), operation_name='OtherExample') == ( + {'second': 'b'}, None) + + def provides_error_if_no_operation_is_provided(): + doc = 'fragment Example on Type { a }' + + class Data: + a = 'b' + + ast = parse(doc) + schema = 
GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, Data()) == (None, [{ + 'message': 'Must provide an operation.'}]) + + def errors_if_no_operation_name_is_provided_with_multiple_operations(): + doc = 'query Example { a } query OtherExample { a }' + + class Data: + a = 'b' + + ast = parse(doc) + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, Data()) == (None, [{ + 'message': 'Must provide operation name if query contains' + ' multiple operations.'}]) + + def errors_if_unknown_operation_name_is_provided(): + doc = 'query Example { a } query OtherExample { a }' + ast = parse(doc) + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, operation_name='UnknownExample') == ( + None, [{'message': "Unknown operation named 'UnknownExample'."}]) + + def uses_the_query_schema_for_queries(): + doc = 'query Q { a } mutation M { c } subscription S { a }' + + class Data: + a = 'b' + c = 'd' + + ast = parse(doc) + schema = GraphQLSchema( + GraphQLObjectType('Q', {'a': GraphQLField(GraphQLString)}), + GraphQLObjectType('M', {'c': GraphQLField(GraphQLString)}), + GraphQLObjectType('S', {'a': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, Data(), operation_name='Q') == ( + {'a': 'b'}, None) + + def uses_the_mutation_schema_for_mutations(): + doc = 'query Q { a } mutation M { c }' + + class Data: + a = 'b' + c = 'd' + + ast = parse(doc) + schema = GraphQLSchema( + GraphQLObjectType('Q', {'a': GraphQLField(GraphQLString)}), + GraphQLObjectType('M', {'c': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, Data(), operation_name='M') == ( + {'c': 'd'}, None) + + def uses_the_subscription_schema_for_subscriptions(): + doc = 'query Q { a } subscription S { a }' + + class Data: + a = 'b' + c = 'd' + + ast = parse(doc) + schema = GraphQLSchema( + query=GraphQLObjectType( + 'Q', {'a': GraphQLField(GraphQLString)}), + subscription=GraphQLObjectType( + 'S', {'a': GraphQLField(GraphQLString)})) + + assert execute(schema, ast, Data(), operation_name='S') == ( + {'a': 'b'}, None) + + @mark.asyncio + async def correct_field_ordering_despite_execution_order(): + doc = '{ a, b, c, d, e}' + + # noinspection PyMethodMayBeStatic,PyMethodMayBeStatic + class Data: + + def a(self, _info): + return 'a' + + async def b(self, _info): + return 'b' + + def c(self, _info): + return 'c' + + async def d(self, _info): + return 'd' + + def e(self, _info): + return 'e' + + ast = parse(doc) + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString), + 'b': GraphQLField(GraphQLString), + 'c': GraphQLField(GraphQLString), + 'd': GraphQLField(GraphQLString), + 'e': GraphQLField(GraphQLString)})) + + result = await execute(schema, ast, Data()) + + assert result == ( + {'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd', 'e': 'e'}, None) + + assert list(result.data) == ['a', 'b', 'c', 'd', 'e'] + + def avoids_recursion(): + doc = """ + query Q { + a + ...Frag + ...Frag + } + + fragment Frag on Type { + a, + ...Frag + } + """ + + class Data: + a = 'b' + + ast = parse(doc) + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'a': GraphQLField(GraphQLString)})) + + query_result = execute(schema, ast, Data(), operation_name='Q') + + assert query_result == ({'a': 'b'}, None) + + def does_not_include_illegal_fields_in_output(): + doc = 'mutation M { thisIsIllegalDontIncludeMe }' + ast = parse(doc) + schema = 
GraphQLSchema( + GraphQLObjectType('Q', {'a': GraphQLField(GraphQLString)}), + GraphQLObjectType('M', {'c': GraphQLField(GraphQLString)})) + + mutation_result = execute(schema, ast) + + assert mutation_result == ({}, None) + + def does_not_include_arguments_that_were_not_set(): + schema = GraphQLSchema(GraphQLObjectType('Type', { + 'field': GraphQLField(GraphQLString, args={ + 'a': GraphQLArgument(GraphQLBoolean), + 'b': GraphQLArgument(GraphQLBoolean), + 'c': GraphQLArgument(GraphQLBoolean), + 'd': GraphQLArgument(GraphQLInt), + 'e': GraphQLArgument(GraphQLInt)}, + resolve=lambda _source, _info, **args: args and dumps(args))})) + + query = parse('{ field(a: true, c: false, e: 0) }') + + assert execute(schema, query) == ( + {'field': '{"a": true, "c": false, "e": 0}'}, None) + + def fails_when_an_is_type_of_check_is_not_met(): + class Special: + # noinspection PyShadowingNames + def __init__(self, value): + self.value = value + + class NotSpecial: + # noinspection PyShadowingNames + def __init__(self, value): + self.value = value + + def __repr__(self): + return f'{self.__class__.__name__}({self.value!r})' + + SpecialType = GraphQLObjectType('SpecialType', { + 'value': GraphQLField(GraphQLString)}, + is_type_of=lambda obj, _info: isinstance(obj, Special)) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'specials': GraphQLField( + GraphQLList(SpecialType), + resolve=lambda root_value, *_args: root_value['specials'])})) + + query = parse('{ specials { value } }') + value = {'specials': [Special('foo'), NotSpecial('bar')]} + + assert execute(schema, query, value) == ({ + 'specials': [{'value': 'foo'}, None] + }, [{ + 'message': + "Expected value of type 'SpecialType' but got:" + " NotSpecial('bar').", + 'locations': [(1, 3)], 'path': ['specials', 1] + }]) + + def executes_ignoring_invalid_non_executable_definitions(): + query = parse(""" + { foo } + + type Query { bar: String } + """) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'foo': GraphQLField(GraphQLString)})) + + assert execute(schema, query) == ({'foo': None}, None) + + def uses_a_custom_field_resolver(): + query = parse('{ foo }') + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'foo': GraphQLField(GraphQLString)})) + + # For the purposes of test, just return the name of the field! 
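+        # (A field_resolver passed to execute() serves as the default
+        # resolver for every field that has no resolve function of its own.)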
+ def custom_resolver(_source, info, **_args): + return info.field_name + + assert execute(schema, query, field_resolver=custom_resolver) == ( + {'foo': 'foo'}, None) diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py new file mode 100644 index 00000000..0a8eb515 --- /dev/null +++ b/tests/execution/test_lists.py @@ -0,0 +1,367 @@ +from collections import namedtuple +from gc import collect + +from pytest import mark + +from graphql.language import parse +from graphql.type import ( + GraphQLField, GraphQLInt, GraphQLList, + GraphQLNonNull, GraphQLObjectType, GraphQLSchema, GraphQLString) +from graphql.execution import execute + +Data = namedtuple('Data', 'test') + + +async def get_async(value): + return value + + +async def raise_async(msg): + raise RuntimeError(msg) + + +def get_response(test_type, test_data): + data = Data(test=test_data) + + data_type = GraphQLObjectType('DataType', lambda: { + 'test': GraphQLField(test_type), + 'nest': GraphQLField(data_type, resolve=lambda *_args: data)}) + + schema = GraphQLSchema(data_type) + + ast = parse('{ nest { test } }') + + return execute(schema, ast, data) + + +def check_response(response, expected): + if not response.errors: + response = response.data + assert response == expected + + +def check(test_type, test_data, expected): + + check_response(get_response(test_type, test_data), expected) + + +async def check_async(test_type, test_data, expected): + check_response(await get_response(test_type, test_data), expected) + + # Note: When Array values are rejected asynchronously, + # the remaining values may not be awaited any more. + # We manually run a garbage collection after each test so that + # these warnings appear immediately and can be filtered out. + collect() + + +def describe_execute_accepts_any_iterable_as_list_value(): + + def accepts_a_set_as_a_list_value(): + # We need to use a dict instead of a set, + # since sets are not ordered in Python. 
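+        # dict.fromkeys() keeps its keys in insertion order (guaranteed
+        # since Python 3.7), so it acts as an ordered, duplicate-free iterable.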
+ check(GraphQLList(GraphQLString), dict.fromkeys( + ['apple', 'banana', 'coconut']), { + 'nest': {'test': ['apple', 'banana', 'coconut']}}) + + def accepts_a_generator_as_a_list_value(): + + def yield_items(): + yield 'one' + yield 2 + yield True + + check(GraphQLList(GraphQLString), yield_items(), { + 'nest': {'test': ['one', '2', 'true']}}) + + def accepts_function_arguments_as_a_list_value(): + + def get_args(*args): + return args # actually just a tuple, nothing special in Python + + check(GraphQLList(GraphQLString), get_args( + 'one', 'two'), {'nest': {'test': ['one', 'two']}}) + + def does_not_accept_iterable_string_literal_as_a_list_value(): + check(GraphQLList(GraphQLString), 'Singular', ( + {'nest': {'test': None}}, + [{'message': 'Expected Iterable,' + ' but did not find one for field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + +def describe_execute_handles_list_nullability(): + + def describe_list(): + type_ = GraphQLList(GraphQLInt) + + def describe_sync_list(): + + def contains_values(): + check(type_, [1, 2], {'nest': {'test': [1, 2]}}) + + def contains_null(): + check(type_, [1, None, 2], {'nest': {'test': [1, None, 2]}}) + + def returns_null(): + check(type_, None, {'nest': {'test': None}}) + + def describe_async_list(): + + @mark.asyncio + async def contains_values(): + await check_async(type_, get_async([1, 2]), { + 'nest': {'test': [1, 2]}}) + + @mark.asyncio + async def contains_null(): + await check_async(type_, get_async([1, None, 2]), { + 'nest': {'test': [1, None, 2]}}) + + @mark.asyncio + async def returns_null(): + await check_async(type_, get_async(None), { + 'nest': {'test': None}}) + + @mark.asyncio + async def async_error(): + await check_async(type_, raise_async('bad'), ( + {'nest': {'test': None}}, + [{'message': 'bad', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + def describe_list_async(): + + @mark.asyncio + async def contains_values(): + await check_async(type_, [get_async(1), get_async(2)], { + 'nest': {'test': [1, 2]}}) + + @mark.asyncio + async def contains_null(): + await check_async(type_, [ + get_async(1), get_async(None), get_async(2)], { + 'nest': {'test': [1, None, 2]}}) + + @mark.asyncio + async def contains_async_error(): + await check_async(type_, [ + get_async(1), raise_async('bad'), get_async(2)], ( + {'nest': {'test': [1, None, 2]}}, + [{'message': 'bad', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + def describe_not_null_list(): + type_ = GraphQLNonNull(GraphQLList(GraphQLInt)) + + def describe_sync_list(): + + def contains_values(): + check(type_, [1, 2], {'nest': {'test': [1, 2]}}) + + def contains_null(): + check(type_, [1, None, 2], {'nest': {'test': [1, None, 2]}}) + + def returns_null(): + check(type_, None, ( + {'nest': None}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + def describe_async_list(): + + @mark.asyncio + async def contains_values(): + await check_async(type_, get_async([1, 2]), { + 'nest': {'test': [1, 2]}}) + + @mark.asyncio + async def contains_null(): + await check_async(type_, get_async([1, None, 2]), { + 'nest': {'test': [1, None, 2]}}) + + @mark.asyncio + async def returns_null(): + await check_async(type_, get_async(None), ( + {'nest': None}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + @mark.asyncio + async def async_error(): + await check_async(type_, 
raise_async('bad'), ( + {'nest': None}, + [{'message': 'bad', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + def describe_list_async(): + + @mark.asyncio + async def contains_values(): + await check_async(type_, [get_async(1), get_async(2)], { + 'nest': {'test': [1, 2]}}) + + @mark.asyncio + async def contains_null(): + await check_async(type_, [ + get_async(1), get_async(None), get_async(2)], { + 'nest': {'test': [1, None, 2]}}) + + @mark.asyncio + async def contains_async_error(): + await check_async(type_, [ + get_async(1), raise_async('bad'), get_async(2)], ( + {'nest': {'test': [1, None, 2]}}, + [{'message': 'bad', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + def describe_list_not_null(): + type_ = GraphQLList(GraphQLNonNull(GraphQLInt)) + + def describe_sync_list(): + + def contains_values(): + check(type_, [1, 2], {'nest': {'test': [1, 2]}}) + + def contains_null(): + check(type_, [1, None, 2], ( + {'nest': {'test': None}}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + def returns_null(): + check(type_, None, {'nest': {'test': None}}) + + def describe_async_list(): + + @mark.asyncio + async def contains_values(): + await check_async(type_, get_async([1, 2]), { + 'nest': {'test': [1, 2]}}) + + @mark.asyncio + async def contains_null(): + await check_async(type_, get_async([1, None, 2]), ( + {'nest': {'test': None}}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + @mark.asyncio + async def returns_null(): + await check_async(type_, get_async(None), { + 'nest': {'test': None}}) + + @mark.asyncio + async def async_error(): + await check_async(type_, raise_async('bad'), ( + {'nest': {'test': None}}, + [{'message': 'bad', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + def describe_list_async(): + + @mark.asyncio + async def contains_values(): + await check_async(type_, [get_async(1), get_async(2)], { + 'nest': {'test': [1, 2]}}) + + @mark.asyncio + @mark.filterwarnings('ignore::RuntimeWarning') + async def contains_null(): + await check_async(type_, [ + get_async(1), get_async(None), get_async(2)], ( + {'nest': {'test': None}}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + @mark.asyncio + @mark.filterwarnings('ignore::RuntimeWarning') + async def contains_async_error(): + await check_async(type_, [ + get_async(1), raise_async('bad'), get_async(2)], ( + {'nest': {'test': None}}, + [{'message': 'bad', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + def describe_not_null_list_not_null(): + type_ = GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLInt))) + + def describe_sync_list(): + + def contains_values(): + check(type_, [1, 2], {'nest': {'test': [1, 2]}}) + + def contains_null(): + check(type_, [1, None, 2], ( + {'nest': None}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + def returns_null(): + check(type_, None, ( + {'nest': None}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + def describe_async_list(): + + @mark.asyncio + async def contains_values(): + await check_async(type_, get_async([1, 2]), { + 'nest': {'test': [1, 2]}}) + + @mark.asyncio + async def 
contains_null(): + await check_async(type_, get_async([1, None, 2]), ( + {'nest': None}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + @mark.asyncio + async def returns_null(): + await check_async(type_, get_async(None), ( + {'nest': None}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + @mark.asyncio + async def async_error(): + await check_async(type_, raise_async('bad'), ( + {'nest': None}, + [{'message': 'bad', + 'locations': [(1, 10)], 'path': ['nest', 'test']}])) + + def describe_list_async(): + + @mark.asyncio + async def contains_values(): + await check_async(type_, [get_async(1), get_async(2)], { + 'nest': {'test': [1, 2]}}) + + @mark.asyncio + @mark.filterwarnings('ignore::RuntimeWarning') + async def contains_null(): + await check_async(type_, [ + get_async(1), get_async(None), get_async(2)], ( + {'nest': None}, + [{'message': 'Cannot return null' + ' for non-nullable field DataType.test.', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) + + @mark.asyncio + @mark.filterwarnings('ignore::RuntimeWarning') + async def contains_async_error(): + await check_async(type_, [ + get_async(1), raise_async('bad'), get_async(2)], ( + {'nest': None}, + [{'message': 'bad', + 'locations': [(1, 10)], 'path': ['nest', 'test', 1]}])) diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py new file mode 100644 index 00000000..4e922a55 --- /dev/null +++ b/tests/execution/test_mutations.py @@ -0,0 +1,158 @@ +import asyncio + +from pytest import mark + +from graphql.execution import execute +from graphql.language import parse +from graphql.type import ( + GraphQLArgument, GraphQLField, GraphQLInt, + GraphQLObjectType, GraphQLSchema) + + +# noinspection PyPep8Naming +class NumberHolder: + + theNumber: int + + def __init__(self, originalNumber: int): + self.theNumber = originalNumber + + +# noinspection PyPep8Naming +class Root: + + numberHolder: NumberHolder + + def __init__(self, originalNumber: int): + self.numberHolder = NumberHolder(originalNumber) + + def immediately_change_the_number(self, newNumber: int) -> NumberHolder: + self.numberHolder.theNumber = newNumber + return self.numberHolder + + async def promise_to_change_the_number( + self, new_number: int) -> NumberHolder: + await asyncio.sleep(0) + return self.immediately_change_the_number(new_number) + + def fail_to_change_the_number(self, newNumber: int): + raise RuntimeError(f'Cannot change the number to {newNumber}') + + async def promise_and_fail_to_change_the_number(self, newNumber: int): + await asyncio.sleep(0) + self.fail_to_change_the_number(newNumber) + + +numberHolderType = GraphQLObjectType('NumberHolder', { + 'theNumber': GraphQLField(GraphQLInt)}) + +# noinspection PyPep8Naming +schema = GraphQLSchema( + GraphQLObjectType('Query', { + 'numberHolder': GraphQLField(numberHolderType)}), + GraphQLObjectType('Mutation', { + 'immediatelyChangeTheNumber': GraphQLField( + numberHolderType, + args={'newNumber': GraphQLArgument(GraphQLInt)}, + resolve=lambda obj, _info, newNumber: + obj.immediately_change_the_number(newNumber)), + 'promiseToChangeTheNumber': GraphQLField( + numberHolderType, + args={'newNumber': GraphQLArgument(GraphQLInt)}, + resolve=lambda obj, _info, newNumber: + obj.promise_to_change_the_number(newNumber)), + 'failToChangeTheNumber': GraphQLField( + numberHolderType, + args={'newNumber': 
GraphQLArgument(GraphQLInt)}, + resolve=lambda obj, _info, newNumber: + obj.fail_to_change_the_number(newNumber)), + 'promiseAndFailToChangeTheNumber': GraphQLField( + numberHolderType, + args={'newNumber': GraphQLArgument(GraphQLInt)}, + resolve=lambda obj, _info, newNumber: + obj.promise_and_fail_to_change_the_number(newNumber))})) + + +def describe_execute_handles_mutation_execution_ordering(): + + @mark.asyncio + async def evaluates_mutations_serially(): + doc = """ + mutation M { + first: immediatelyChangeTheNumber(newNumber: 1) { + theNumber + }, + second: promiseToChangeTheNumber(newNumber: 2) { + theNumber + }, + third: immediatelyChangeTheNumber(newNumber: 3) { + theNumber + } + fourth: promiseToChangeTheNumber(newNumber: 4) { + theNumber + }, + fifth: immediatelyChangeTheNumber(newNumber: 5) { + theNumber + } + } + """ + + mutation_result = await execute(schema, parse(doc), Root(6)) + + assert mutation_result == ({ + 'first': {'theNumber': 1}, + 'second': {'theNumber': 2}, + 'third': {'theNumber': 3}, + 'fourth': {'theNumber': 4}, + 'fifth': {'theNumber': 5} + }, None) + + @mark.asyncio + async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): + doc = """ + mutation M { + first: immediatelyChangeTheNumber(newNumber: 1) { + theNumber + }, + second: promiseToChangeTheNumber(newNumber: 2) { + theNumber + }, + third: failToChangeTheNumber(newNumber: 3) { + theNumber + } + fourth: promiseToChangeTheNumber(newNumber: 4) { + theNumber + }, + fifth: immediatelyChangeTheNumber(newNumber: 5) { + theNumber + } + sixth: promiseAndFailToChangeTheNumber(newNumber: 6) { + theNumber + } + } + """ + + result = await execute(schema, parse(doc), Root(6)) + + assert result == ({ + 'first': { + 'theNumber': 1, + }, + 'second': { + 'theNumber': 2, + }, + 'third': None, + 'fourth': { + 'theNumber': 4, + }, + 'fifth': { + 'theNumber': 5, + }, + 'sixth': None + }, [{ + 'message': 'Cannot change the number to 3', + 'locations': [(9, 15)], 'path': ['third'] + }, { + 'message': 'Cannot change the number to 6', + 'locations': [(18, 15)], 'path': ['sixth'] + }]) diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py new file mode 100644 index 00000000..c33f0a28 --- /dev/null +++ b/tests/execution/test_nonnull.py @@ -0,0 +1,509 @@ +import re +from inspect import isawaitable +from pytest import fixture, mark + +from graphql.execution import execute +from graphql.language import parse +from graphql.type import ( + GraphQLArgument, GraphQLField, GraphQLNonNull, GraphQLObjectType, + GraphQLSchema, GraphQLString) + +sync_error = RuntimeError('sync') +sync_non_null_error = RuntimeError('syncNonNull') +promise_error = RuntimeError('promise') +promise_non_null_error = RuntimeError('promiseNonNull') + + +# noinspection PyPep8Naming,PyMethodMayBeStatic +class ThrowingData: + + def sync(self, _info): + raise sync_error + + def syncNonNull(self, _info): + raise sync_non_null_error + + async def promise(self, _info): + raise promise_error + + async def promiseNonNull(self, _info): + raise promise_non_null_error + + def syncNest(self, _info): + return ThrowingData() + + def syncNonNullNest(self, _info): + return ThrowingData() + + async def promiseNest(self, _info): + return ThrowingData() + + async def promiseNonNullNest(self, _info): + return ThrowingData() + + +# noinspection PyPep8Naming,PyMethodMayBeStatic +class NullingData: + + def sync(self, _info): + return None + + def syncNonNull(self, _info): + return None + + async def promise(self, _info): + return None + + async def 
promiseNonNull(self, _info): + return None + + def syncNest(self, _info): + return NullingData() + + def syncNonNullNest(self, _info): + return NullingData() + + async def promiseNest(self, _info): + return NullingData() + + async def promiseNonNullNest(self, _info): + return NullingData() + + +DataType = GraphQLObjectType('DataType', lambda: { + 'sync': GraphQLField(GraphQLString), + 'syncNonNull': GraphQLField(GraphQLNonNull(GraphQLString)), + 'promise': GraphQLField(GraphQLString), + 'promiseNonNull': GraphQLField(GraphQLNonNull(GraphQLString)), + 'syncNest': GraphQLField(DataType), + 'syncNonNullNest': GraphQLField(GraphQLNonNull(DataType)), + 'promiseNest': GraphQLField(DataType), + 'promiseNonNullNest': GraphQLField(GraphQLNonNull(DataType))}) + +schema = GraphQLSchema(DataType) + + +def execute_query(query, root_value): + return execute(schema, parse(query), root_value) + + +def patch(data): + return re.sub(r'\bsyncNonNull\b', 'promiseNonNull', re.sub( + r'\bsync\b', 'promise', data)) + + +async def execute_sync_and_async(query, root_value): + sync_result = execute_query(query, root_value) + if isawaitable(sync_result): + sync_result = await sync_result + async_result = await execute_query(patch(query), root_value) + + assert repr(async_result) == patch(repr(sync_result)) + return sync_result + + +def describe_execute_handles_non_nullable_types(): + + def describe_nulls_a_nullable_field(): + query = """ + { + sync + } + """ + + @mark.asyncio + async def returns_null(): + result = await execute_sync_and_async(query, NullingData()) + assert result == ({'sync': None}, None) + + @mark.asyncio + async def throws(): + result = await execute_sync_and_async(query, ThrowingData()) + assert result == ({'sync': None}, [{ + 'message': str(sync_error), + 'path': ['sync'], 'locations': [(3, 15)]}]) + + def describe_nulls_an_immediate_object_that_contains_a_non_null_field(): + + query = """ + { + syncNest { + syncNonNull, + } + } + """ + + @mark.asyncio + async def returns_null(): + result = await execute_sync_and_async(query, NullingData()) + assert result == ({'syncNest': None}, [{ + 'message': 'Cannot return null for non-nullable field' + ' DataType.syncNonNull.', + 'path': ['syncNest', 'syncNonNull'], + 'locations': [(4, 17)]}]) + + @mark.asyncio + async def throws(): + result = await execute_sync_and_async(query, ThrowingData()) + assert result == ({'syncNest': None}, [{ + 'message': str(sync_non_null_error), + 'path': ['syncNest', 'syncNonNull'], + 'locations': [(4, 17)]}]) + + def describe_nulls_a_promised_object_that_contains_a_non_null_field(): + query = """ + { + promiseNest { + syncNonNull, + } + } + """ + + @mark.asyncio + async def returns_null(): + result = await execute_sync_and_async(query, NullingData()) + assert result == ({'promiseNest': None}, [{ + 'message': 'Cannot return null for non-nullable field' + ' DataType.syncNonNull.', + 'path': ['promiseNest', 'syncNonNull'], + 'locations': [(4, 17)]}]) + + @mark.asyncio + async def throws(): + result = await execute_sync_and_async(query, ThrowingData()) + assert result == ({'promiseNest': None}, [{ + 'message': str(sync_non_null_error), + 'path': ['promiseNest', 'syncNonNull'], + 'locations': [(4, 17)]}]) + + def describe_nulls_a_complex_tree_of_nullable_fields_each(): + query = """ + { + syncNest { + sync + promise + syncNest { sync promise } + promiseNest { sync promise } + } + promiseNest { + sync + promise + syncNest { sync promise } + promiseNest { sync promise } + } + } + """ + data = { + 'syncNest': { + 'sync': None, + 
'promise': None, + 'syncNest': {'sync': None, 'promise': None}, + 'promiseNest': {'sync': None, 'promise': None}}, + 'promiseNest': { + 'sync': None, + 'promise': None, + 'syncNest': {'sync': None, 'promise': None}, + 'promiseNest': {'sync': None, 'promise': None}}} + + @mark.asyncio + async def returns_null(): + result = await execute_query(query, NullingData()) + assert result == (data, None) + + @mark.asyncio + async def throws(): + result = await execute_query(query, ThrowingData()) + assert result == (data, [{ + 'message': str(sync_error), + 'path': ['syncNest', 'sync'], + 'locations': [(4, 17)] + }, { + 'message': str(sync_error), + 'path': ['syncNest', 'syncNest', 'sync'], + 'locations': [(6, 28)] + }, { + 'message': str(promise_error), + 'path': ['syncNest', 'promise'], + 'locations': [(5, 17)] + }, { + 'message': str(promise_error), + 'path': ['syncNest', 'syncNest', 'promise'], + 'locations': [(6, 33)] + }, { + 'message': str(sync_error), + 'path': ['syncNest', 'promiseNest', 'sync'], + 'locations': [(7, 31)] + }, { + 'message': str(promise_error), + 'path': ['syncNest', 'promiseNest', 'promise'], + 'locations': [(7, 36)] + }, { + 'message': str(sync_error), + 'path': ['promiseNest', 'sync'], + 'locations': [(10, 17)] + }, { + 'message': str(sync_error), + 'path': ['promiseNest', 'syncNest', 'sync'], + 'locations': [(12, 28)] + }, { + 'message': str(promise_error), + 'path': ['promiseNest', 'promise'], + 'locations': [(11, 17)] + }, { + 'message': str(promise_error), + 'path': ['promiseNest', 'syncNest', 'promise'], + 'locations': [(12, 33)] + }, { + 'message': str(sync_error), + 'path': ['promiseNest', 'promiseNest', 'sync'], + 'locations': [(13, 31)] + }, { + 'message': str(promise_error), + 'path': ['promiseNest', 'promiseNest', 'promise'], + 'locations': [(13, 36)] + }]) + + def describe_nulls_first_nullable_after_long_chain_of_non_null_fields(): + query = """ + { + syncNest { + syncNonNullNest { + promiseNonNullNest { + syncNonNullNest { + promiseNonNullNest { + syncNonNull + } + } + } + } + } + promiseNest { + syncNonNullNest { + promiseNonNullNest { + syncNonNullNest { + promiseNonNullNest { + syncNonNull + } + } + } + } + } + anotherNest: syncNest { + syncNonNullNest { + promiseNonNullNest { + syncNonNullNest { + promiseNonNullNest { + promiseNonNull + } + } + } + } + } + anotherPromiseNest: promiseNest { + syncNonNullNest { + promiseNonNullNest { + syncNonNullNest { + promiseNonNullNest { + promiseNonNull + } + } + } + } + } + } + """ + data = { + 'syncNest': None, + 'promiseNest': None, + 'anotherNest': None, + 'anotherPromiseNest': None} + + @mark.asyncio + async def returns_null(): + result = await execute_query(query, NullingData()) + assert result == (data, [{ + 'message': 'Cannot return null for non-nullable field' + ' DataType.syncNonNull.', + 'path': [ + 'syncNest', 'syncNonNullNest', 'promiseNonNullNest', + 'syncNonNullNest', 'promiseNonNullNest', 'syncNonNull'], + 'locations': [(8, 25)] + }, { + 'message': 'Cannot return null for non-nullable field' + ' DataType.syncNonNull.', + 'path': [ + 'promiseNest', 'syncNonNullNest', 'promiseNonNullNest', + 'syncNonNullNest', 'promiseNonNullNest', 'syncNonNull'], + 'locations': [(19, 25)] + + }, { + 'message': 'Cannot return null for non-nullable field' + ' DataType.promiseNonNull.', + 'path': [ + 'anotherNest', 'syncNonNullNest', 'promiseNonNullNest', + 'syncNonNullNest', 'promiseNonNullNest', 'promiseNonNull'], + 'locations': [(30, 25)] + }, { + 'message': 'Cannot return null for non-nullable field' + ' 
DataType.promiseNonNull.', + 'path': [ + 'anotherPromiseNest', 'syncNonNullNest', + 'promiseNonNullNest', 'syncNonNullNest', + 'promiseNonNullNest', 'promiseNonNull'], + 'locations': [(41, 25)] + }]) + + @mark.asyncio + async def throws(): + result = await execute_query(query, ThrowingData()) + assert result == (data, [{ + 'message': str(sync_non_null_error), + 'path': [ + 'syncNest', 'syncNonNullNest', 'promiseNonNullNest', + 'syncNonNullNest', 'promiseNonNullNest', 'syncNonNull'], + 'locations': [(8, 25)] + }, { + 'message': str(sync_non_null_error), + 'path': [ + 'promiseNest', 'syncNonNullNest', 'promiseNonNullNest', + 'syncNonNullNest', 'promiseNonNullNest', 'syncNonNull'], + 'locations': [(19, 25)] + + }, { + 'message': str(promise_non_null_error), + 'path': [ + 'anotherNest', 'syncNonNullNest', 'promiseNonNullNest', + 'syncNonNullNest', 'promiseNonNullNest', 'promiseNonNull'], + 'locations': [(30, 25)] + }, { + 'message': str(promise_non_null_error), + 'path': [ + 'anotherPromiseNest', 'syncNonNullNest', + 'promiseNonNullNest', 'syncNonNullNest', + 'promiseNonNullNest', 'promiseNonNull'], + 'locations': [(41, 25)] + }]) + + def describe_nulls_the_top_level_if_non_nullable_field(): + query = """ + { + syncNonNull + } + """ + + @mark.asyncio + async def returns_null(): + result = await execute_sync_and_async(query, NullingData()) + assert result == (None, [{ + 'message': 'Cannot return null for non-nullable field' + ' DataType.syncNonNull.', + 'path': ['syncNonNull'], 'locations': [(3, 17)]}]) + + @mark.asyncio + async def throws(): + result = await execute_sync_and_async(query, ThrowingData()) + assert result == (None, [{ + 'message': str(sync_non_null_error), + 'path': ['syncNonNull'], 'locations': [(3, 17)]}]) + + def describe_handles_non_null_argument(): + + # noinspection PyPep8Naming + @fixture + def resolve(_obj, _info, cannotBeNull): + if isinstance(cannotBeNull, str): + return f'Passed: {cannotBeNull}' + + schema_with_non_null_arg = GraphQLSchema( + GraphQLObjectType('Query', { + 'withNonNullArg': GraphQLField(GraphQLString, args={ + 'cannotBeNull': + GraphQLArgument(GraphQLNonNull(GraphQLString)) + }, resolve=resolve)})) + + def succeeds_when_passed_non_null_literal_value(): + result = execute(schema_with_non_null_arg, parse(""" + query { + withNonNullArg (cannotBeNull: "literal value") + } + """)) + + assert result == ( + {'withNonNullArg': 'Passed: literal value'}, None) + + def succeeds_when_passed_non_null_variable_value(): + result = execute(schema_with_non_null_arg, parse(""" + query ($testVar: String = "default value") { + withNonNullArg (cannotBeNull: $testVar) + } + """), variable_values={}) # intentionally missing variable + + assert result == ( + {'withNonNullArg': 'Passed: default value'}, None) + + def field_error_when_missing_non_null_arg(): + # Note: validation should identify this issue first + # (missing args rule) however execution should still + # protect against this. + result = execute(schema_with_non_null_arg, parse(""" + query { + withNonNullArg + } + """)) + + assert result == ( + {'withNonNullArg': None}, [{ + 'message': "Argument 'cannotBeNull' of required type" + " 'String!' was not provided.", + 'locations': [(3, 19)], 'path': ['withNonNullArg'] + }]) + + def field_error_when_non_null_arg_provided_null(): + # Note: validation should identify this issue first + # (values of correct type rule) however execution + # should still protect against this. 
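+            # As asserted below, the invalid null argument is reported as
+            # a field error: the field resolves to None and the error
+            # records the argument location and the field path.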
+ result = execute(schema_with_non_null_arg, parse(""" + query { + withNonNullArg(cannotBeNull: null) + } + """)) + + assert result == ( + {'withNonNullArg': None}, [{ + 'message': "Argument 'cannotBeNull' of non-null type" + " 'String!' must not be null.", + 'locations': [(3, 48)], 'path': ['withNonNullArg'] + }]) + + def field_error_when_non_null_arg_not_provided_variable_value(): + # Note: validation should identify this issue first + # (variables in allowed position rule) however execution + # should still protect against this. + result = execute(schema_with_non_null_arg, parse(""" + query ($testVar: String) { + withNonNullArg(cannotBeNull: $testVar) + } + """), variable_values={}) # intentionally missing variable + + assert result == ( + {'withNonNullArg': None}, [{ + 'message': "Argument 'cannotBeNull' of required type" + " 'String!' was provided the variable" + " '$testVar' which was not provided" + ' a runtime value.', + 'locations': [(3, 48)], 'path': ['withNonNullArg'] + }]) + + def field_error_when_non_null_arg_provided_explicit_null_variable(): + result = execute(schema_with_non_null_arg, parse(""" + query ($testVar: String = "default value") { + withNonNullArg (cannotBeNull: $testVar) + } + """), variable_values={'testVar': None}) + + assert result == ( + {'withNonNullArg': None}, [{ + 'message': "Argument 'cannotBeNull' of non-null type" + " 'String!' must not be null.", + 'locations': [(3, 49)], 'path': ['withNonNullArg'] + }]) diff --git a/tests/execution/test_resolve.py b/tests/execution/test_resolve.py new file mode 100644 index 00000000..5010b4a8 --- /dev/null +++ b/tests/execution/test_resolve.py @@ -0,0 +1,85 @@ +from json import dumps + +from pytest import fixture + +from graphql import graphql_sync +from graphql.type import ( + GraphQLArgument, GraphQLField, GraphQLInt, + GraphQLObjectType, GraphQLSchema, GraphQLString) + + +def describe_execute_resolve_function(): + + @fixture + def test_schema(test_field): + return GraphQLSchema(GraphQLObjectType('Query', {'test': test_field})) + + def default_function_accesses_attributes(): + schema = test_schema(GraphQLField(GraphQLString)) + + class Source: + test = 'testValue' + + assert graphql_sync(schema, '{ test }', Source()) == ( + {'test': 'testValue'}, None) + + def default_function_accesses_keys(): + schema = test_schema(GraphQLField(GraphQLString)) + + source = {'test': 'testValue'} + + assert graphql_sync(schema, '{ test }', source) == ( + {'test': 'testValue'}, None) + + def default_function_calls_methods(): + schema = test_schema(GraphQLField(GraphQLString)) + + class Source: + _secret = 'testValue' + + def test(self, _info): + return self._secret + + assert graphql_sync(schema, '{ test }', Source()) == ( + {'test': 'testValue'}, None) + + def default_function_passes_args_and_context(): + schema = test_schema(GraphQLField(GraphQLInt, args={ + 'addend1': GraphQLArgument(GraphQLInt)})) + + class Adder: + def __init__(self, num): + self._num = num + + def test(self, info, addend1): + return self._num + addend1 + info.context.addend2 + + source = Adder(700) + + class Context: + addend2 = 9 + + assert graphql_sync( + schema, '{ test(addend1: 80) }', source, Context()) == ( + {'test': 789}, None) + + def uses_provided_resolve_function(): + schema = test_schema(GraphQLField( + GraphQLString, args={ + 'aStr': GraphQLArgument(GraphQLString), + 'aInt': GraphQLArgument(GraphQLInt)}, + resolve=lambda source, info, **args: dumps([source, args]))) + + assert graphql_sync(schema, '{ test }') == ( + {'test': '[null, {}]'}, None) + 
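+        # Without a root value, the resolver receives None as its source
+        # (serialized as JSON null) and an empty arguments dict.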
+ assert graphql_sync(schema, '{ test }', 'Source!') == ( + {'test': '["Source!", {}]'}, None) + + assert graphql_sync( + schema, '{ test(aStr: "String!") }', 'Source!') == ( + {'test': '["Source!", {"aStr": "String!"}]'}, None) + + assert graphql_sync( + schema, '{ test(aInt: -123, aStr: "String!") }', 'Source!') == ( + {'test': '["Source!", {"aStr": "String!", "aInt": -123}]'}, None) diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py new file mode 100644 index 00000000..55d4fcf4 --- /dev/null +++ b/tests/execution/test_schema.py @@ -0,0 +1,145 @@ +from graphql.execution import execute +from graphql.language import parse +from graphql.type import ( + GraphQLArgument, GraphQLBoolean, GraphQLField, GraphQLID, + GraphQLInt, GraphQLList, GraphQLNonNull, GraphQLObjectType, + GraphQLSchema, GraphQLString) + + +def describe_execute_handles_execution_with_a_complex_schema(): + + def executes_using_a_schema(): + BlogImage = GraphQLObjectType('Image', { + 'url': GraphQLField(GraphQLString), + 'width': GraphQLField(GraphQLInt), + 'height': GraphQLField(GraphQLInt)}) + + BlogAuthor = GraphQLObjectType('Author', lambda: { + 'id': GraphQLField(GraphQLString), + 'name': GraphQLField(GraphQLString), + 'pic': GraphQLField(BlogImage, args={ + 'width': GraphQLArgument(GraphQLInt), + 'height': GraphQLArgument(GraphQLInt)}, + resolve=lambda obj, info, width, height: + obj.pic(info, width, height)), + 'recentArticle': GraphQLField(BlogArticle)}) + + BlogArticle = GraphQLObjectType('Article', { + 'id': GraphQLField(GraphQLNonNull(GraphQLString)), + 'isPublished': GraphQLField(GraphQLBoolean), + 'author': GraphQLField(BlogAuthor), + 'title': GraphQLField(GraphQLString), + 'body': GraphQLField(GraphQLString), + 'keywords': GraphQLField(GraphQLList(GraphQLString))}) + + # noinspection PyShadowingBuiltins + BlogQuery = GraphQLObjectType('Query', { + 'article': GraphQLField( + BlogArticle, args={'id': GraphQLArgument(GraphQLID)}, + resolve=lambda obj, info, id: Article(id)), + 'feed': GraphQLField( + GraphQLList(BlogArticle), + resolve=lambda *_args: [Article(n + 1) for n in range(10)])}) + + BlogSchema = GraphQLSchema(BlogQuery) + + class Article: + + # noinspection PyShadowingBuiltins + def __init__(self, id): + self.id = id + self.isPublished = True + self.author = JohnSmith() + self.title = f'My Article {id}' + self.body = 'This is a post' + self.hidden = 'This data is not exposed in the schema' + self.keywords = ['foo', 'bar', 1, True, None] + + # noinspection PyPep8Naming,PyMethodMayBeStatic + class Author: + + def pic(self, info_, width, height): + return Pic(123, width, height) + + @property + def recentArticle(self): + return Article(1) + + class JohnSmith(Author): + id = 123 + name = 'John Smith' + + class Pic: + + def __init__(self, uid, width, height): + self.url = f'cdn://{uid}' + self.width = f'{width}' + self.height = f'{height}' + + request = """ + { + feed { + id, + title + }, + article(id: "1") { + ...articleFields, + author { + id, + name, + pic(width: 640, height: 480) { + url, + width, + height + }, + recentArticle { + ...articleFields, + keywords + } + } + } + } + + fragment articleFields on Article { + id, + isPublished, + title, + body, + hidden, + notdefined + } + """ + + # Note: this is intentionally not validating to ensure appropriate + # behavior occurs when executing an invalid query. 
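+        # The fields 'hidden' and 'notdefined' requested by the fragment are
+        # not defined on the Article type, so they are left out of the result.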
+ assert execute(BlogSchema, parse(request)) == ({ + 'feed': [ + {'id': '1', 'title': 'My Article 1'}, + {'id': '2', 'title': 'My Article 2'}, + {'id': '3', 'title': 'My Article 3'}, + {'id': '4', 'title': 'My Article 4'}, + {'id': '5', 'title': 'My Article 5'}, + {'id': '6', 'title': 'My Article 6'}, + {'id': '7', 'title': 'My Article 7'}, + {'id': '8', 'title': 'My Article 8'}, + {'id': '9', 'title': 'My Article 9'}, + {'id': '10', 'title': 'My Article 10'}], + 'article': { + 'id': '1', + 'isPublished': True, + 'title': 'My Article 1', + 'body': 'This is a post', + 'author': { + 'id': '123', + 'name': 'John Smith', + 'pic': { + 'url': 'cdn://123', + 'width': 640, + 'height': 480}, + 'recentArticle': { + 'id': '1', + 'isPublished': True, + 'title': 'My Article 1', + 'body': 'This is a post', + 'keywords': ['foo', 'bar', '1', 'true', None]}}}}, + None) diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py new file mode 100644 index 00000000..36f70b25 --- /dev/null +++ b/tests/execution/test_sync.py @@ -0,0 +1,82 @@ +from inspect import isawaitable + +from pytest import fixture, mark, raises + +from graphql import graphql_sync +from graphql.execution import execute +from graphql.language import parse +from graphql.type import ( + GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString) + + +def describe_execute_synchronously_when_possible(): + + @fixture + def resolve_sync(root_value, info_): + return root_value + + @fixture + async def resolve_async(root_value, info_): + return root_value + + schema = GraphQLSchema( + GraphQLObjectType('Query', { + 'syncField': GraphQLField(GraphQLString, resolve=resolve_sync), + 'asyncField': GraphQLField(GraphQLString, resolve=resolve_async)}), + GraphQLObjectType('Mutation', { + 'syncMutationField': GraphQLField( + GraphQLString, resolve=resolve_sync)})) + + def does_not_return_a_promise_for_initial_errors(): + doc = 'fragment Example on Query { syncField }' + assert execute(schema, parse(doc), 'rootValue') == ( + None, [{'message': 'Must provide an operation.'}]) + + def does_not_return_a_promise_if_fields_are_all_synchronous(): + doc = 'query Example { syncField }' + assert execute(schema, parse(doc), 'rootValue') == ( + {'syncField': 'rootValue'}, None) + + def does_not_return_a_promise_if_mutation_fields_are_all_synchronous(): + doc = 'mutation Example { syncMutationField }' + assert execute(schema, parse(doc), 'rootValue') == ( + {'syncMutationField': 'rootValue'}, None) + + @mark.asyncio + async def returns_a_promise_if_any_field_is_asynchronous(): + doc = 'query Example { syncField, asyncField }' + result = execute(schema, parse(doc), 'rootValue') + assert isawaitable(result) + assert await result == ( + {'syncField': 'rootValue', 'asyncField': 'rootValue'}, None) + + def describe_graphql_sync(): + + def does_not_return_a_promise_for_syntax_errors(): + doc = 'fragment Example on Query { { { syncField }' + assert graphql_sync(schema, doc) == (None, [{ + 'message': 'Syntax Error: Expected Name, found {', + 'locations': [(1, 29)]}]) + + def does_not_return_a_promise_for_validation_errors(): + doc = 'fragment Example on Query { unknownField }' + assert graphql_sync(schema, doc) == (None, [{ + 'message': "Cannot query field 'unknownField' on type 'Query'." 
+ " Did you mean 'syncField' or 'asyncField'?", + 'locations': [(1, 29)] + }, { + 'message': "Fragment 'Example' is never used.", + 'locations': [(1, 1)] + }]) + + def does_not_return_a_promise_for_sync_execution(): + doc = 'query Example { syncField }' + assert graphql_sync(schema, doc, 'rootValue') == ( + {'syncField': 'rootValue'}, None) + + def throws_if_encountering_async_operation(): + doc = 'query Example { syncField, asyncField }' + with raises(RuntimeError) as exc_info: + graphql_sync(schema, doc, 'rootValue') + msg = str(exc_info.value) + assert msg == 'GraphQL execution failed to complete synchronously.' diff --git a/tests/execution/test_union_interface.py b/tests/execution/test_union_interface.py new file mode 100644 index 00000000..f1474083 --- /dev/null +++ b/tests/execution/test_union_interface.py @@ -0,0 +1,294 @@ +from typing import NamedTuple, Union, List + +from graphql.execution import execute +from graphql.language import parse +from graphql.type import ( + GraphQLBoolean, GraphQLField, GraphQLInterfaceType, GraphQLList, + GraphQLObjectType, GraphQLSchema, GraphQLString, GraphQLUnionType) + + +class Dog(NamedTuple): + + name: str + barks: bool + + +class Cat(NamedTuple): + + name: str + meows: bool + + +Pet = Union[Dog, Cat] + + +class Person(NamedTuple): + + name: str + pets: List[Pet] + friends: List['Person'] + + +NamedType = GraphQLInterfaceType('Named', { + 'name': GraphQLField(GraphQLString)}) + +DogType = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString), + 'barks': GraphQLField(GraphQLBoolean)}, + interfaces=[NamedType], + is_type_of=lambda value, info: isinstance(value, Dog)) + +CatType = GraphQLObjectType('Cat', { + 'name': GraphQLField(GraphQLString), + 'meows': GraphQLField(GraphQLBoolean)}, + interfaces=[NamedType], + is_type_of=lambda value, info: isinstance(value, Cat)) + + +def resolve_pet_type(value, info): + if isinstance(value, Dog): + return DogType + if isinstance(value, Cat): + return CatType + + +PetType = GraphQLUnionType( + 'Pet', [DogType, CatType], resolve_type=resolve_pet_type) + +PersonType = GraphQLObjectType('Person', { + 'name': GraphQLField(GraphQLString), + 'pets': GraphQLField(GraphQLList(PetType)), + 'friends': GraphQLField(GraphQLList(NamedType))}, + interfaces=[NamedType], + is_type_of=lambda value, info: isinstance(value, Person)) + +schema = GraphQLSchema(PersonType, types=[PetType]) + +garfield = Cat('Garfield', False) +odie = Dog('Odie', True) +liz = Person('Liz', [], []) +john = Person('John', [garfield, odie], [liz, odie]) + + +def describe_execute_union_and_intersection_types(): + + def can_introspect_on_union_and_intersection_types(): + ast = parse(""" + { + Named: __type(name: "Named") { + kind + name + fields { name } + interfaces { name } + possibleTypes { name } + enumValues { name } + inputFields { name } + } + Pet: __type(name: "Pet") { + kind + name + fields { name } + interfaces { name } + possibleTypes { name } + enumValues { name } + inputFields { name } + } + } + """) + + assert execute(schema, ast) == ({ + 'Named': { + 'kind': 'INTERFACE', + 'name': 'Named', + 'fields': [{'name': 'name'}], + 'interfaces': None, + 'possibleTypes': [ + {'name': 'Person'}, {'name': 'Dog'}, {'name': 'Cat'}], + 'enumValues': None, + 'inputFields': None}, + 'Pet': { + 'kind': 'UNION', + 'name': 'Pet', + 'fields': None, + 'interfaces': None, + 'possibleTypes': [{'name': 'Dog'}, {'name': 'Cat'}], + 'enumValues': None, + 'inputFields': None}}, + None) + + def executes_using_union_types(): + # NOTE: This is an *invalid* 
query, but it should be *executable*. + ast = parse(""" + { + __typename + name + pets { + __typename + name + barks + meows + } + } + """) + + assert execute(schema, ast, john) == ({ + '__typename': 'Person', + 'name': 'John', + 'pets': [ + {'__typename': 'Cat', 'name': 'Garfield', 'meows': False}, + {'__typename': 'Dog', 'name': 'Odie', 'barks': True}]}, + None) + + def executes_union_types_with_inline_fragment(): + # This is the valid version of the query in the above test. + ast = parse(""" + { + __typename + name + pets { + __typename + ... on Dog { + name + barks + } + ... on Cat { + name + meows + } + } + } + """) + + assert execute(schema, ast, john) == ({ + '__typename': 'Person', + 'name': 'John', + 'pets': [ + {'__typename': 'Cat', 'name': 'Garfield', 'meows': False}, + {'__typename': 'Dog', 'name': 'Odie', 'barks': True}]}, + None) + + def executes_using_interface_types(): + # NOTE: This is an *invalid* query, but it should be a *executable*. + ast = parse(""" + { + __typename + name + friends { + __typename + name + barks + meows + } + } + """) + + assert execute(schema, ast, john) == ({ + '__typename': 'Person', + 'name': 'John', + 'friends': [ + {'__typename': 'Person', 'name': 'Liz'}, + {'__typename': 'Dog', 'name': 'Odie', 'barks': True}]}, + None) + + def executes_interface_types_with_inline_fragment(): + # This is the valid version of the query in the above test. + ast = parse(""" + { + __typename + name + friends { + __typename + name + ... on Dog { + barks + } + ... on Cat { + meows + } + } + } + """) + + assert execute(schema, ast, john) == ({ + '__typename': 'Person', + 'name': 'John', + 'friends': [ + {'__typename': 'Person', 'name': 'Liz'}, + {'__typename': 'Dog', 'name': 'Odie', 'barks': True}]}, + None) + + def allows_fragment_conditions_to_be_abstract_types(): + ast = parse(""" + { + __typename + name + pets { ...PetFields } + friends { ...FriendFields } + } + + fragment PetFields on Pet { + __typename + ... on Dog { + name + barks + } + ... on Cat { + name + meows + } + } + + fragment FriendFields on Named { + __typename + name + ... on Dog { + barks + } + ... 
on Cat { + meows + } + } + """) + + assert execute(schema, ast, john) == ({ + '__typename': 'Person', + 'name': 'John', + 'pets': [ + {'__typename': 'Cat', 'name': 'Garfield', 'meows': False}, + {'__typename': 'Dog', 'name': 'Odie', 'barks': True}], + 'friends': [ + {'__typename': 'Person', 'name': 'Liz'}, + {'__typename': 'Dog', 'name': 'Odie', 'barks': True}]}, + None) + + def gets_execution_info_in_resolver(): + encountered = {} + + def resolve_type(obj, info): + encountered['context'] = info.context + encountered['schema'] = info.schema + encountered['root_value'] = info.root_value + return PersonType2 + + NamedType2 = GraphQLInterfaceType('Named', { + 'name': GraphQLField(GraphQLString)}, + resolve_type=resolve_type) + + PersonType2 = GraphQLObjectType('Person', { + 'name': GraphQLField(GraphQLString), + 'friends': GraphQLField(GraphQLList(NamedType2))}, + interfaces=[NamedType2]) + + schema2 = GraphQLSchema(PersonType2) + + john2 = Person('John', [], [liz]) + + context = {'authToken': '123abc'} + + ast = parse('{ name, friends { name } }') + + assert execute(schema2, ast, john2, context) == ({ + 'name': 'John', 'friends': [{'name': 'Liz'}]}, None) + + assert encountered == { + 'schema': schema2, 'root_value': john2, 'context': context} diff --git a/tests/execution/test_variables.py b/tests/execution/test_variables.py new file mode 100644 index 00000000..13b7d5fd --- /dev/null +++ b/tests/execution/test_variables.py @@ -0,0 +1,717 @@ +from math import nan + +from graphql.error import INVALID +from graphql.execution import execute +from graphql.language import parse +from graphql.type import ( + GraphQLArgument, GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLInputField, GraphQLInputObjectType, GraphQLList, GraphQLNonNull, + GraphQLObjectType, GraphQLScalarType, GraphQLSchema, GraphQLString) + +TestComplexScalar = GraphQLScalarType( + name='ComplexScalar', + serialize=lambda value: + 'SerializedValue' if value == 'DeserializedValue' else None, + parse_value=lambda value: + 'DeserializedValue' if value == 'SerializedValue' else None, + parse_literal=lambda ast, _variables=None: + 'DeserializedValue' if ast.value == 'SerializedValue' else None) + + +TestInputObject = GraphQLInputObjectType('TestInputObject', { + 'a': GraphQLInputField(GraphQLString), + 'b': GraphQLInputField(GraphQLList(GraphQLString)), + 'c': GraphQLInputField(GraphQLNonNull(GraphQLString)), + 'd': GraphQLInputField(TestComplexScalar)}) + + +TestNestedInputObject = GraphQLInputObjectType('TestNestedInputObject', { + 'na': GraphQLInputField(GraphQLNonNull(TestInputObject)), + 'nb': GraphQLInputField(GraphQLNonNull(GraphQLString))}) + + +TestEnum = GraphQLEnumType('TestEnum', { + 'NULL': None, + 'UNDEFINED': INVALID, + 'NAN': nan, + 'FALSE': False, + 'CUSTOM': 'custom value', + 'DEFAULT_VALUE': GraphQLEnumValue()}) + + +def field_with_input_arg(input_arg: GraphQLArgument): + return GraphQLField( + GraphQLString, args={'input': input_arg}, + resolve=lambda _obj, _info, **args: + repr(args['input']) if 'input' in args else None) + + +TestType = GraphQLObjectType('TestType', { + 'fieldWithEnumInput': field_with_input_arg(GraphQLArgument(TestEnum)), + 'fieldWithNonNullableEnumInput': field_with_input_arg(GraphQLArgument( + GraphQLNonNull(TestEnum))), + 'fieldWithObjectInput': field_with_input_arg(GraphQLArgument( + TestInputObject)), + 'fieldWithNullableStringInput': field_with_input_arg(GraphQLArgument( + GraphQLString)), + 'fieldWithNonNullableStringInput': field_with_input_arg(GraphQLArgument( + 
GraphQLNonNull(GraphQLString))), + 'fieldWithDefaultArgumentValue': field_with_input_arg(GraphQLArgument( + GraphQLString, default_value='Hello World')), + 'fieldWithNonNullableStringInputAndDefaultArgumentValue': + field_with_input_arg(GraphQLArgument(GraphQLNonNull( + GraphQLString), default_value='Hello World')), + 'fieldWithNestedInputObject': field_with_input_arg( + GraphQLArgument(TestNestedInputObject, default_value='Hello World')), + 'list': field_with_input_arg(GraphQLArgument( + GraphQLList(GraphQLString))), + 'nnList': field_with_input_arg(GraphQLArgument( + GraphQLNonNull(GraphQLList(GraphQLString)))), + 'listNN': field_with_input_arg(GraphQLArgument( + GraphQLList(GraphQLNonNull(GraphQLString)))), + 'nnListNN': field_with_input_arg(GraphQLArgument( + GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLString)))))}) + +schema = GraphQLSchema(TestType) + + +def execute_query(query, variable_values=None): + document = parse(query) + return execute(schema, document, variable_values=variable_values) + + +def describe_execute_handles_inputs(): + + def describe_handles_objects_and_nullability(): + + def describe_using_inline_struct(): + + def executes_with_complex_input(): + result = execute_query(""" + { + fieldWithObjectInput( + input: {a: "foo", b: ["bar"], c: "baz"}) + } + """) + + assert result == ({ + 'fieldWithObjectInput': + "{'a': 'foo', 'b': ['bar'], 'c': 'baz'}"}, None) + + def properly_parses_single_value_to_list(): + result = execute_query(""" + { + fieldWithObjectInput( + input: {a: "foo", b: "bar", c: "baz"}) + } + """) + + assert result == ({ + 'fieldWithObjectInput': + "{'a': 'foo', 'b': ['bar'], 'c': 'baz'}"}, None) + + def properly_parses_null_value_to_null(): + result = execute_query(""" + { + fieldWithObjectInput( + input: {a: null, b: null, c: "C", d: null}) + } + """) + + assert result == ({ + 'fieldWithObjectInput': + "{'a': None, 'b': None, 'c': 'C', 'd': None}"}, + None) + + def properly_parses_null_value_in_list(): + result = execute_query(""" + { + fieldWithObjectInput(input: {b: ["A",null,"C"], c: "C"}) + } + """) + + assert result == ({ + 'fieldWithObjectInput': + "{'b': ['A', None, 'C'], 'c': 'C'}"}, None) + + def does_not_use_incorrect_value(): + result = execute_query(""" + { + fieldWithObjectInput(input: ["foo", "bar", "baz"]) + } + """) + + assert result == ({'fieldWithObjectInput': None}, [{ + 'message': "Argument 'input' has invalid value" + ' ["foo", "bar", "baz"].', + 'path': ['fieldWithObjectInput'], + 'locations': [(3, 51)]}]) + + def properly_runs_parse_literal_on_complex_scalar_types(): + result = execute_query(""" + { + fieldWithObjectInput( + input: {c: "foo", d: "SerializedValue"}) + } + """) + + assert result == ({ + 'fieldWithObjectInput': + "{'c': 'foo', 'd': 'DeserializedValue'}"}, None) + + def describe_using_variables(): + doc = """ + query ($input: TestInputObject) { + fieldWithObjectInput(input: $input) + } + """ + + def executes_with_complex_input(): + params = {'input': {'a': 'foo', 'b': ['bar'], 'c': 'baz'}} + result = execute_query(doc, params) + + assert result == ({ + 'fieldWithObjectInput': + "{'a': 'foo', 'b': ['bar'], 'c': 'baz'}"}, None) + + def uses_undefined_when_variable_not_provided(): + result = execute_query(""" + query q($input: String) { + fieldWithNullableStringInput(input: $input) + } + """, {}) # Intentionally missing variable values. 
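+                # The omitted variable leaves the 'input' argument unset, so
+                # the resolver returns None here; an explicit null (next test)
+                # is passed through and shows up as the string 'None' instead.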
+ + assert result == ({'fieldWithNullableStringInput': None}, None) + + def uses_null_when_variable_provided_explicit_null_value(): + result = execute_query(""" + query q($input: String) { + fieldWithNullableStringInput(input: $input) + } + """, {'input': None}) + + assert result == ( + {'fieldWithNullableStringInput': 'None'}, None) + + def uses_default_value_when_not_provided(): + result = execute_query(""" + query ($input: TestInputObject = { + a: "foo", b: ["bar"], c: "baz"}) { + fieldWithObjectInput(input: $input) + } + """) + + assert result == ({ + 'fieldWithObjectInput': + "{'a': 'foo', 'b': ['bar'], 'c': 'baz'}"}, None) + + def does_not_use_default_value_when_provided(): + result = execute_query(""" + query q($input: String = "Default value") { + fieldWithNullableStringInput(input: $input) + } + """, {'input': 'Variable value'}) + + assert result == ( + {'fieldWithNullableStringInput': "'Variable value'"}, None) + + def uses_explicit_null_value_instead_of_default_value(): + result = execute_query(""" + query q($input: String = "Default value") { + fieldWithNullableStringInput(input: $input) + } + """, {'input': None}) + + assert result == ( + {'fieldWithNullableStringInput': 'None'}, None) + + def uses_null_default_value_when_not_provided(): + result = execute_query(""" + query q($input: String = null) { + fieldWithNullableStringInput(input: $input) + } + """, {}) # Intentionally missing variable values. + + assert result == ( + {'fieldWithNullableStringInput': 'None'}, None) + + def properly_parses_single_value_to_list(): + params = {'input': {'a': 'foo', 'b': 'bar', 'c': 'baz'}} + result = execute_query(doc, params) + + assert result == ({ + 'fieldWithObjectInput': + "{'a': 'foo', 'b': ['bar'], 'c': 'baz'}"}, None) + + def executes_with_complex_scalar_input(): + params = {'input': {'c': 'foo', 'd': 'SerializedValue'}} + result = execute_query(doc, params) + + assert result == ({ + 'fieldWithObjectInput': + "{'c': 'foo', 'd': 'DeserializedValue'}"}, None) + + def errors_on_null_for_nested_non_null(): + params = {'input': {'a': 'foo', 'b': 'bar', 'c': None}} + result = execute_query(doc, params) + + assert result == (None, [{ + 'message': "Variable '$input' got invalid value" + " {'a': 'foo', 'b': 'bar', 'c': None};" + ' Expected non-nullable type String!' + ' not to be null at value.c.', + 'locations': [(2, 24)], 'path': None}]) + + def errors_on_incorrect_type(): + result = execute_query(doc, {'input': 'foo bar'}) + + assert result == (None, [{ + 'message': + "Variable '$input' got invalid value 'foo bar';" + ' Expected type TestInputObject to be a dict.', + 'locations': [(2, 24)], 'path': None}]) + + def errors_on_omission_of_nested_non_null(): + result = execute_query( + doc, {'input': {'a': 'foo', 'b': 'bar'}}) + + assert result == (None, [{ + 'message': + "Variable '$input' got invalid value" + " {'a': 'foo', 'b': 'bar'}; Field value.c" + ' of required type String! was not provided.', + 'locations': [(2, 24)]}]) + + def errors_on_deep_nested_errors_and_with_many_errors(): + nested_doc = """ + query ($input: TestNestedInputObject) { + fieldWithNestedObjectInput(input: $input) + } + """ + result = execute_query( + nested_doc, {'input': {'na': {'a': 'foo'}}}) + + assert result == (None, [{ + 'message': + "Variable '$input' got invalid value" + " {'na': {'a': 'foo'}}; Field value.na.c" + ' of required type String! 
was not provided.', + 'locations': [(2, 28)]}, { + 'message': + "Variable '$input' got invalid value" + " {'na': {'a': 'foo'}}; Field value.nb" + ' of required type String! was not provided.', + 'locations': [(2, 28)]}]) + + def errors_on_addition_of_unknown_input_field(): + params = {'input': { + 'a': 'foo', 'b': 'bar', 'c': 'baz', 'extra': 'dog'}} + result = execute_query(doc, params) + + assert result == (None, [{ + 'message': + "Variable '$input' got invalid value {'a': 'foo'," + " 'b': 'bar', 'c': 'baz', 'extra': 'dog'}; Field" + " 'extra' is not defined by type TestInputObject.", + 'locations': [(2, 24)]}]) + + def describe_handles_custom_enum_values(): + + def allows_custom_enum_values_as_inputs(): + result = execute_query(""" + { + null: fieldWithEnumInput(input: NULL) + NaN: fieldWithEnumInput(input: NAN) + false: fieldWithEnumInput(input: FALSE) + customValue: fieldWithEnumInput(input: CUSTOM) + defaultValue: fieldWithEnumInput(input: DEFAULT_VALUE) + } + """) + + assert result == ({ + 'null': 'None', + 'NaN': 'nan', + 'false': 'False', + 'customValue': "'custom value'", + # different from graphql.js, enum values are always wrapped + 'defaultValue': 'None' + }, None) + + def allows_non_nullable_inputs_to_have_null_as_enum_custom_value(): + result = execute_query(""" + { + fieldWithNonNullableEnumInput(input: NULL) + } + """) + + assert result == ({'fieldWithNonNullableEnumInput': 'None'}, None) + + def describe_handles_nullable_scalars(): + + def allows_nullable_inputs_to_be_omitted(): + result = execute_query(""" + { + fieldWithNullableStringInput + } + """) + + assert result == ({'fieldWithNullableStringInput': None}, None) + + def allows_nullable_inputs_to_be_omitted_in_a_variable(): + result = execute_query(""" + query ($value: String) { + fieldWithNullableStringInput(input: $value) + } + """) + + assert result == ({'fieldWithNullableStringInput': None}, None) + + def allows_nullable_inputs_to_be_omitted_in_an_unlisted_variable(): + result = execute_query(""" + query SetsNullable { + fieldWithNullableStringInput(input: $value) + } + """) + + assert result == ({'fieldWithNullableStringInput': None}, None) + + def allows_nullable_inputs_to_be_set_to_null_in_a_variable(): + doc = """ + query SetsNullable($value: String) { + fieldWithNullableStringInput(input: $value) + } + """ + result = execute_query(doc, {'value': None}) + + assert result == ({'fieldWithNullableStringInput': 'None'}, None) + + def allows_nullable_inputs_to_be_set_to_a_value_in_a_variable(): + doc = """ + query SetsNullable($value: String) { + fieldWithNullableStringInput(input: $value) + } + """ + result = execute_query(doc, {'value': 'a'}) + + assert result == ({'fieldWithNullableStringInput': "'a'"}, None) + + def allows_nullable_inputs_to_be_set_to_a_value_directly(): + result = execute_query(""" + { + fieldWithNullableStringInput(input: "a") + } + """) + + assert result == ({'fieldWithNullableStringInput': "'a'"}, None) + + def describe_handles_non_nullable_scalars(): + + def allows_non_nullable_inputs_to_be_omitted_given_a_default(): + result = execute_query(""" + query ($value: String = "default") { + fieldWithNonNullableStringInput(input: $value) + } + """) + + assert result == ({ + 'fieldWithNonNullableStringInput': "'default'"}, None) + + def does_not_allow_non_nullable_inputs_to_be_omitted_in_a_variable(): + result = execute_query(""" + query ($value: String!) 
{ + fieldWithNonNullableStringInput(input: $value) + } + """) + + assert result == (None, [{ + 'message': "Variable '$value' of required type 'String!'" + ' was not provided.', + 'locations': [(2, 24)], 'path': None}]) + + def does_not_allow_non_nullable_inputs_to_be_set_to_null_in_variable(): + doc = """ + query ($value: String!) { + fieldWithNonNullableStringInput(input: $value) + } + """ + result = execute_query(doc, {'value': None}) + + assert result == (None, [{ + 'message': "Variable '$value' of non-null type 'String!'" + ' must not be null.', + 'locations': [(2, 24)], 'path': None}]) + + def allows_non_nullable_inputs_to_be_set_to_a_value_in_a_variable(): + doc = """ + query ($value: String!) { + fieldWithNonNullableStringInput(input: $value) + } + """ + result = execute_query(doc, {'value': 'a'}) + + assert result == ({'fieldWithNonNullableStringInput': "'a'"}, None) + + def allows_non_nullable_inputs_to_be_set_to_a_value_directly(): + result = execute_query(""" + { + fieldWithNonNullableStringInput(input: "a") + } + """) + + assert result == ({'fieldWithNonNullableStringInput': "'a'"}, None) + + def reports_error_for_missing_non_nullable_inputs(): + result = execute_query('{ fieldWithNonNullableStringInput }') + + assert result == ({'fieldWithNonNullableStringInput': None}, [{ + 'message': "Argument 'input' of required type 'String!'" + ' was not provided.', + 'locations': [(1, 3)], + 'path': ['fieldWithNonNullableStringInput']}]) + + def reports_error_for_array_passed_into_string_input(): + doc = """ + query ($value: String!) { + fieldWithNonNullableStringInput(input: $value) + } + """ + result = execute_query(doc, {'value': [1, 2, 3]}) + + assert result == (None, [{ + 'message': "Variable '$value' got invalid value [1, 2, 3];" + ' Expected type String; String cannot represent' + ' a non string value: [1, 2, 3]', + 'locations': [(2, 24)], 'path':None}]) + + def reports_error_for_non_provided_variables_for_non_nullable_inputs(): + # Note: this test would typically fail validation before + # encountering this execution error, however for queries which + # previously validated and are being run against a new schema which + # have introduced a breaking change to make a formerly non-required + # argument required, this asserts failure before allowing the + # underlying code to receive a non-null value. + result = execute_query(""" + { + fieldWithNonNullableStringInput(input: $foo) + } + """) + + assert result == ({'fieldWithNonNullableStringInput': None}, [{ + 'message': "Argument 'input' of required type 'String!'" + " was provided the variable '$foo' which was" + ' not provided a runtime value.', + 'locations': [(3, 58)], + 'path': ['fieldWithNonNullableStringInput']}]) + + def describe_handles_lists_and_nullability(): + + def allows_lists_to_be_null(): + doc = """ + query ($input: [String]) { + list(input: $input) + } + """ + result = execute_query(doc, {'input': None}) + + assert result == ({'list': 'None'}, None) + + def allows_lists_to_contain_values(): + doc = """ + query ($input: [String]) { + list(input: $input) + } + """ + result = execute_query(doc, {'input': ['A']}) + + assert result == ({'list': "['A']"}, None) + + def allows_lists_to_contain_null(): + doc = """ + query ($input: [String]) { + list(input: $input) + } + """ + + result = execute_query(doc, {'input': ['A', None, 'B']}) + + assert result == ({'list': "['A', None, 'B']"}, None) + + def does_not_allow_non_null_lists_to_be_null(): + doc = """ + query ($input: [String]!) 
{ + nnList(input: $input) + } + """ + + result = execute_query(doc, {'input': None}) + + assert result == (None, [{ + 'message': "Variable '$input' of non-null type '[String]!'" + ' must not be null.', + 'locations': [(2, 24)], 'path': None}]) + + def allows_non_null_lists_to_contain_values(): + doc = """ + query ($input: [String]!) { + nnList(input: $input) + } + """ + + result = execute_query(doc, {'input': ['A']}) + + assert result == ({'nnList': "['A']"}, None) + + def allows_non_null_lists_to_contain_null(): + doc = """ + query ($input: [String]!) { + nnList(input: $input) + } + """ + + result = execute_query(doc, {'input': ['A', None, 'B']}) + + assert result == ({'nnList': "['A', None, 'B']"}, None) + + def allows_lists_of_non_nulls_to_be_null(): + doc = """ + query ($input: [String!]) { + listNN(input: $input) + } + """ + + result = execute_query(doc, {'input': None}) + + assert result == ({'listNN': 'None'}, None) + + def allows_lists_of_non_nulls_to_contain_values(): + doc = """ + query ($input: [String!]) { + listNN(input: $input) + } + """ + + result = execute_query(doc, {'input': ['A']}) + + assert result == ({'listNN': "['A']"}, None) + + def does_not_allow_lists_of_non_nulls_to_contain_null(): + doc = """ + query ($input: [String!]) { + listNN(input: $input) + } + """ + result = execute_query(doc, {'input': ['A', None, 'B']}) + + assert result == (None, [{ + 'message': "Variable '$input' got invalid value" + " ['A', None, 'B']; Expected non-nullable type" + ' String! not to be null at value[1].', + 'locations': [(2, 24)]}]) + + def does_not_allow_non_null_lists_of_non_nulls_to_be_null(): + doc = """ + query ($input: [String!]!) { + nnListNN(input: $input) + } + """ + result = execute_query(doc, {'input': None}) + + assert result == (None, [{ + 'message': "Variable '$input' of non-null type '[String!]!'" + ' must not be null.', + 'locations': [(2, 24)]}]) + + def allows_non_null_lists_of_non_nulls_to_contain_values(): + doc = """ + query ($input: [String!]!) { + nnListNN(input: $input) + } + """ + result = execute_query(doc, {'input': ['A']}) + + assert result == ({'nnListNN': "['A']"}, None) + + def does_not_allow_non_null_lists_of_non_nulls_to_contain_null(): + doc = """ + query ($input: [String!]!) { + nnListNN(input: $input) + } + """ + result = execute_query(doc, {'input': ['A', None, 'B']}) + + assert result == (None, [{ + 'message': "Variable '$input' got invalid value" + " ['A', None, 'B']; Expected non-nullable type" + ' String! not to be null at value[1].', + 'locations': [(2, 24)], 'path': None}]) + + def does_not_allow_invalid_types_to_be_used_as_values(): + doc = """ + query ($input: TestType!) { + fieldWithObjectInput(input: $input) + } + """ + result = execute_query(doc, {'input': {'list': ['A', 'B']}}) + + assert result == (None, [{ + 'message': "Variable '$input' expected value" + " of type 'TestType!' which cannot" + ' be used as an input type.', + 'locations': [(2, 32)]}]) + + def does_not_allow_unknown_types_to_be_used_as_values(): + doc = """ + query ($input: UnknownType!) { + fieldWithObjectInput(input: $input) + } + """ + result = execute_query(doc, {'input': 'whoknows'}) + + assert result == (None, [{ + 'message': "Variable '$input' expected value" + " of type 'UnknownType!' 
which cannot" + ' be used as an input type.', + 'locations': [(2, 32)]}]) + + def describe_execute_uses_argument_default_values(): + + def when_no_argument_provided(): + result = execute_query('{ fieldWithDefaultArgumentValue }') + + assert result == ({ + 'fieldWithDefaultArgumentValue': "'Hello World'"}, None) + + def when_omitted_variable_provided(): + result = execute_query(""" + query ($optional: String) { + fieldWithDefaultArgumentValue(input: $optional) + } + """) + + assert result == ({ + 'fieldWithDefaultArgumentValue': "'Hello World'"}, None) + + def not_when_argument_cannot_be_coerced(): + result = execute_query(""" + { + fieldWithDefaultArgumentValue(input: WRONG_TYPE) + } + """) + + assert result == ({ + 'fieldWithDefaultArgumentValue': None}, [{ + 'message': "Argument 'input' has invalid value" + ' WRONG_TYPE.', + 'locations': [(3, 56)], + 'path': ['fieldWithDefaultArgumentValue']}]) + + def when_no_runtime_value_is_provided_to_a_non_null_argument(): + result = execute_query(""" + query optionalVariable($optional: String) { + fieldWithNonNullableStringInputAndDefaultArgumentValue(input: $optional) + } + """) # noqa + + assert result == ( + {'fieldWithNonNullableStringInputAndDefaultArgumentValue': + "'Hello World'"}, None) diff --git a/tests/language/__init__.py b/tests/language/__init__.py new file mode 100644 index 00000000..626b98d6 --- /dev/null +++ b/tests/language/__init__.py @@ -0,0 +1,20 @@ +"""Tests for graphql.language""" + +from os.path import dirname, join + +from pytest import fixture + + +def read_graphql(name): + path = join(dirname(__file__), name + '.graphql') + return open(path, encoding='utf-8').read() + + +@fixture(scope='module') +def kitchen_sink(): + return read_graphql('kitchen_sink') + + +@fixture(scope='module') +def schema_kitchen_sink(): + return read_graphql('schema_kitchen_sink') diff --git a/tests/language/kitchen_sink.graphql b/tests/language/kitchen_sink.graphql new file mode 100644 index 00000000..6fcf394b --- /dev/null +++ b/tests/language/kitchen_sink.graphql @@ -0,0 +1,59 @@ +# Copyright (c) 2015-present, Facebook, Inc. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +query queryName($foo: ComplexType, $site: Site = MOBILE) { + whoever123is: node(id: [123, 456]) { + id , + ... on User @defer { + field2 { + id , + alias: field1(first:10, after:$foo,) @include(if: $foo) { + id, + ...frag + } + } + } + ... @skip(unless: $foo) { + id + } + ... { + id + } + } +} + +mutation likeStory { + like(story: 123) @defer { + story { + id + } + } +} + +subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) { + storyLikeSubscribe(input: $input) { + story { + likers { + count + } + likeSentence { + text + } + } + } +} + +fragment frag on Friend { + foo(size: $size, bar: $b, obj: {key: "value", block: """ + + block string uses \""" + + """}) +} + +{ + unnamed(truthy: true, falsey: false, nullish: null), + query +} diff --git a/tests/language/schema_kitchen_sink.graphql b/tests/language/schema_kitchen_sink.graphql new file mode 100644 index 00000000..1c7b5c3b --- /dev/null +++ b/tests/language/schema_kitchen_sink.graphql @@ -0,0 +1,131 @@ +# Copyright (c) 2015-present, Facebook, Inc. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +schema { + query: QueryType + mutation: MutationType +} + +""" +This is a description +of the `Foo` type. 
+""" +type Foo implements Bar & Baz { + one: Type + """ + This is a description of the `two` field. + """ + two( + """ + This is a description of the `argument` argument. + """ + argument: InputType! + ): Type + three(argument: InputType, other: String): Int + four(argument: String = "string"): String + five(argument: [String] = ["string", "string"]): String + six(argument: InputType = {key: "value"}): Type + seven(argument: Int = null): Type +} + +type AnnotatedObject @onObject(arg: "value") { + annotatedField(arg: Type = "default" @onArg): Type @onField +} + +type UndefinedType + +extend type Foo { + seven(argument: [String]): Type +} + +extend type Foo @onType + +interface Bar { + one: Type + four(argument: String = "string"): String +} + +interface AnnotatedInterface @onInterface { + annotatedField(arg: Type @onArg): Type @onField +} + +interface UndefinedInterface + +extend interface Bar { + two(argument: InputType!): Type +} + +extend interface Bar @onInterface + +union Feed = Story | Article | Advert + +union AnnotatedUnion @onUnion = A | B + +union AnnotatedUnionTwo @onUnion = | A | B + +union UndefinedUnion + +extend union Feed = Photo | Video + +extend union Feed @onUnion + +scalar CustomScalar + +scalar AnnotatedScalar @onScalar + +extend scalar CustomScalar @onScalar + +enum Site { + DESKTOP + MOBILE +} + +enum AnnotatedEnum @onEnum { + ANNOTATED_VALUE @onEnumValue + OTHER_VALUE +} + +enum UndefinedEnum + +extend enum Site { + VR +} + +extend enum Site @onEnum + +input InputType { + key: String! + answer: Int = 42 +} + +input AnnotatedInput @onInputObject { + annotatedField: Type @onField +} + +input UndefinedInput + +extend input InputType { + other: Float = 1.23e4 +} + +extend input InputType @onInputObject + +directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +directive @include(if: Boolean!) + on FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT + +directive @include2(if: Boolean!) 
on + | FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT + +extend schema @onSchema + +extend schema @onSchema { + subscription: SubscriptionType +} diff --git a/tests/language/test_ast.py b/tests/language/test_ast.py new file mode 100644 index 00000000..0a86e92a --- /dev/null +++ b/tests/language/test_ast.py @@ -0,0 +1,51 @@ +from copy import copy + +from graphql.language import Node + + +class SampleTestNode(Node): + __slots__ = 'alpha', 'beta' + + +def describe_node_class(): + + def initializes_with_keywords(): + node = SampleTestNode(alpha=1, beta=2, loc=0) + assert node.alpha == 1 + assert node.beta == 2 + assert node.loc == 0 + node = SampleTestNode(alpha=1, loc=None) + assert node.loc is None + assert node.alpha == 1 + assert node.beta is None + node = SampleTestNode(alpha=1, beta=2, gamma=3) + assert node.alpha == 1 + assert node.beta == 2 + assert not hasattr(node, 'gamma') + + def has_representation_with_loc(): + node = SampleTestNode(alpha=1, beta=2) + assert repr(node) == 'SampleTestNode' + node = SampleTestNode(alpha=1, beta=2, loc=3) + assert repr(node) == 'SampleTestNode at 3' + + def can_check_equality(): + node = SampleTestNode(alpha=1, beta=2) + node2 = SampleTestNode(alpha=1, beta=2) + assert node2 == node + node2 = SampleTestNode(alpha=1, beta=1) + assert node2 != node + node2 = Node(alpha=1, beta=2) + assert node2 != node + + def can_create_shallow_copy(): + node = SampleTestNode(alpha=1, beta=2) + node2 = copy(node) + assert node2 is not node + assert node2 == node + + def provides_snake_cased_kind_as_class_attribute(): + assert SampleTestNode.kind == 'sample_test' + + def provides_keys_as_class_attribute(): + assert SampleTestNode.keys == ['loc', 'alpha', 'beta'] diff --git a/tests/language/test_block_string_value.py b/tests/language/test_block_string_value.py new file mode 100644 index 00000000..2d4948ce --- /dev/null +++ b/tests/language/test_block_string_value.py @@ -0,0 +1,73 @@ +from graphql.language.block_string_value import block_string_value + + +def join(*args): + return '\n'.join(args) + + +def describe_block_string_value(): + + def removes_uniform_indentation_from_a_string(): + raw_value = join( + '', + ' Hello,', + ' World!', + '', + ' Yours,', + ' GraphQL.') + assert block_string_value(raw_value) == join( + 'Hello,', ' World!', '', 'Yours,', ' GraphQL.') + + def removes_empty_leading_and_trailing_lines(): + raw_value = join( + '', + '', + ' Hello,', + ' World!', + '', + ' Yours,', + ' GraphQL.', + '', + '') + assert block_string_value(raw_value) == join( + 'Hello,', ' World!', '', 'Yours,', ' GraphQL.') + + def removes_blank_leading_and_trailing_lines(): + raw_value = join( + ' ', + ' ', + ' Hello,', + ' World!', + '', + ' Yours,', + ' GraphQL.', + ' ', + ' ') + assert block_string_value(raw_value) == join( + 'Hello,', ' World!', '', 'Yours,', ' GraphQL.') + + def retains_indentation_from_first_line(): + raw_value = join( + ' Hello,', + ' World!', + '', + ' Yours,', + ' GraphQL.') + assert block_string_value(raw_value) == join( + ' Hello,', ' World!', '', 'Yours,', ' GraphQL.') + + def does_not_alter_trailing_spaces(): + raw_value = join( + ' ', + ' Hello, ', + ' World! ', + ' ', + ' Yours, ', + ' GraphQL. ', + ' ') + assert block_string_value(raw_value) == join( + 'Hello, ', + ' World! ', + ' ', + 'Yours, ', + ' GraphQL. 
') diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py new file mode 100644 index 00000000..fe782909 --- /dev/null +++ b/tests/language/test_lexer.py @@ -0,0 +1,298 @@ +from pytest import raises + +from graphql.error import GraphQLSyntaxError +from graphql.language import ( + Lexer, Source, SourceLocation, Token, TokenKind) +from graphql.pyutils import dedent + + +def lex_one(s): + lexer = Lexer(Source(s)) + return lexer.advance() + + +def assert_syntax_error(text, message, location): + with raises(GraphQLSyntaxError) as exc_info: + lex_one(text) + error = exc_info.value + assert message in error.message + assert error.locations == [location] + + +def describe_lexer(): + + def disallows_uncommon_control_characters(): + assert_syntax_error( + '\x07', "Cannot contain the invalid character '\\x07'", (1, 1)) + + # noinspection PyArgumentEqualDefault + def accepts_bom_header(): + token = lex_one('\uFEFF foo') + assert token == Token(TokenKind.NAME, 2, 5, 1, 3, None, 'foo') + + # noinspection PyArgumentEqualDefault + def records_line_and_column(): + token = lex_one('\n \r\n \r foo\n') + assert token == Token(TokenKind.NAME, 8, 11, 4, 3, None, 'foo') + + def can_be_stringified(): + token = lex_one('foo') + assert repr(token) == "" + assert token.desc == "Name 'foo'" + + # noinspection PyArgumentEqualDefault + def skips_whitespace_and_comments(): + token = lex_one('\n\n foo\n\n\n') + assert token == Token(TokenKind.NAME, 6, 9, 3, 5, None, 'foo') + token = lex_one('\n #comment\n foo#comment\n') + assert token == Token(TokenKind.NAME, 18, 21, 3, 5, None, 'foo') + token = lex_one(',,,foo,,,') + assert token == Token(TokenKind.NAME, 3, 6, 1, 4, None, 'foo') + + def errors_respect_whitespace(): + with raises(GraphQLSyntaxError) as exc_info: + lex_one('\n\n ?\n\n\n') + + assert str(exc_info.value) == dedent(""" + Syntax Error: Cannot parse the unexpected character '?'. + + GraphQL request (3:5) + 2:\x20 + 3: ? + ^ + 4:\x20 + """) + + def updates_line_numbers_in_error_for_file_context(): + s = '\n\n ?\n\n' + source = Source(s, 'foo.js', SourceLocation(11, 12)) + with raises(GraphQLSyntaxError) as exc_info: + Lexer(source).advance() + assert str(exc_info.value) == dedent(""" + Syntax Error: Cannot parse the unexpected character '?'. + + foo.js (13:6) + 12:\x20 + 13: ? + ^ + 14:\x20 + """) + + def updates_column_numbers_in_error_for_file_context(): + source = Source('?', 'foo.js', SourceLocation(1, 5)) + with raises(GraphQLSyntaxError) as exc_info: + Lexer(source).advance() + assert str(exc_info.value) == dedent(""" + Syntax Error: Cannot parse the unexpected character '?'. + + foo.js (1:5) + 1: ? 
+ ^ + """) + + # noinspection PyArgumentEqualDefault + def lexes_strings(): + assert lex_one('"simple"') == Token( + TokenKind.STRING, 0, 8, 1, 1, None, 'simple') + assert lex_one('" white space "') == Token( + TokenKind.STRING, 0, 15, 1, 1, None, ' white space ') + assert lex_one('"quote \\""') == Token( + TokenKind.STRING, 0, 10, 1, 1, None, 'quote "') + assert lex_one('"escaped \\n\\r\\b\\t\\f"') == Token( + TokenKind.STRING, 0, 20, 1, 1, None, 'escaped \n\r\b\t\f') + assert lex_one('"slashes \\\\ \\/"') == Token( + TokenKind.STRING, 0, 15, 1, 1, None, 'slashes \\ /') + assert lex_one('"unicode \\u1234\\u5678\\u90AB\\uCDEF"') == Token( + TokenKind.STRING, 0, 34, 1, 1, None, + 'unicode \u1234\u5678\u90AB\uCDEF') + + def lex_reports_useful_string_errors(): + assert_syntax_error('"', 'Unterminated string.', (1, 2)) + assert_syntax_error('"no end quote', 'Unterminated string.', (1, 14)) + assert_syntax_error( + "'single quotes'", "Unexpected single quote character ('), " + 'did you mean to use a double quote (")?', (1, 1)) + assert_syntax_error( + '"contains unescaped \x07 control char"', + "Invalid character within String: '\\x07'.", (1, 21)) + assert_syntax_error( + '"null-byte is not \x00 end of file"', + "Invalid character within String: '\\x00'.", (1, 19)) + assert_syntax_error( + '"multi\nline"', 'Unterminated string', (1, 7)) + assert_syntax_error( + '"multi\rline"', 'Unterminated string', (1, 7)) + assert_syntax_error( + '"bad \\x esc"', "Invalid character escape sequence: '\\x'.", + (1, 7)) + assert_syntax_error( + '"bad \\u1 esc"', + "Invalid character escape sequence: '\\u1 es'.", (1, 7)) + assert_syntax_error( + '"bad \\u0XX1 esc"', + "Invalid character escape sequence: '\\u0XX1'.", (1, 7)) + assert_syntax_error( + '"bad \\uXXXX esc"', + "Invalid character escape sequence: '\\uXXXX'.", (1, 7)) + assert_syntax_error( + '"bad \\uFXXX esc"', + "Invalid character escape sequence: '\\uFXXX'.", (1, 7)) + assert_syntax_error( + '"bad \\uXXXF esc"', + "Invalid character escape sequence: '\\uXXXF'.", (1, 7)) + + # noinspection PyArgumentEqualDefault + def lexes_block_strings(): + assert lex_one('"""simple"""') == Token( + TokenKind.BLOCK_STRING, 0, 12, 1, 1, None, 'simple') + assert lex_one('""" white space """') == Token( + TokenKind.BLOCK_STRING, 0, 19, 1, 1, None, ' white space ') + assert lex_one('"""contains " quote"""') == Token( + TokenKind.BLOCK_STRING, 0, 22, 1, 1, None, 'contains " quote') + assert lex_one('"""contains \\""" triplequote"""') == Token( + TokenKind.BLOCK_STRING, 0, 31, 1, 1, None, + 'contains """ triplequote') + assert lex_one('"""multi\nline"""') == Token( + TokenKind.BLOCK_STRING, 0, 16, 1, 1, None, 'multi\nline') + assert lex_one('"""multi\rline\r\nnormalized"""') == Token( + TokenKind.BLOCK_STRING, 0, 28, 1, 1, None, + 'multi\nline\nnormalized') + assert lex_one('"""unescaped \\n\\r\\b\\t\\f\\u1234"""') == Token( + TokenKind.BLOCK_STRING, 0, 32, 1, 1, None, + 'unescaped \\n\\r\\b\\t\\f\\u1234') + assert lex_one('"""slashes \\\\ \\/"""') == Token( + TokenKind.BLOCK_STRING, 0, 19, 1, 1, None, 'slashes \\\\ \\/') + assert lex_one( + '"""\n\n spans\n multiple\n' + ' lines\n\n """') == Token( + TokenKind.BLOCK_STRING, 0, 68, 1, 1, None, + 'spans\n multiple\n lines') + + def lex_reports_useful_block_string_errors(): + assert_syntax_error('"""', 'Unterminated string.', (1, 4)) + assert_syntax_error('"""no end quote', 'Unterminated string.', (1, 16)) + assert_syntax_error( + '"""contains unescaped \x07 control char"""', + "Invalid character within String: '\\x07'.", 
(1, 23)) + assert_syntax_error( + '"""null-byte is not \x00 end of file"""', + "Invalid character within String: '\\x00'.", (1, 21)) + + # noinspection PyArgumentEqualDefault + def lexes_numbers(): + assert lex_one('0') == Token(TokenKind.INT, 0, 1, 1, 1, None, '0') + assert lex_one('1') == Token(TokenKind.INT, 0, 1, 1, 1, None, '1') + assert lex_one('4') == Token(TokenKind.INT, 0, 1, 1, 1, None, '4') + assert lex_one('9') == Token(TokenKind.INT, 0, 1, 1, 1, None, '9') + assert lex_one('42') == Token(TokenKind.INT, 0, 2, 1, 1, None, '42') + assert lex_one('4.123') == Token( + TokenKind.FLOAT, 0, 5, 1, 1, None, '4.123') + assert lex_one('-4') == Token( + TokenKind.INT, 0, 2, 1, 1, None, '-4') + assert lex_one('-42') == Token( + TokenKind.INT, 0, 3, 1, 1, None, '-42') + assert lex_one('-4.123') == Token( + TokenKind.FLOAT, 0, 6, 1, 1, None, '-4.123') + assert lex_one('0.123') == Token( + TokenKind.FLOAT, 0, 5, 1, 1, None, '0.123') + assert lex_one('123e4') == Token( + TokenKind.FLOAT, 0, 5, 1, 1, None, '123e4') + assert lex_one('123E4') == Token( + TokenKind.FLOAT, 0, 5, 1, 1, None, '123E4') + assert lex_one('123e-4') == Token( + TokenKind.FLOAT, 0, 6, 1, 1, None, '123e-4') + assert lex_one('123e+4') == Token( + TokenKind.FLOAT, 0, 6, 1, 1, None, '123e+4') + assert lex_one('-1.123e4') == Token( + TokenKind.FLOAT, 0, 8, 1, 1, None, '-1.123e4') + assert lex_one('-1.123E4') == Token( + TokenKind.FLOAT, 0, 8, 1, 1, None, '-1.123E4') + assert lex_one('-1.123e-4') == Token( + TokenKind.FLOAT, 0, 9, 1, 1, None, '-1.123e-4') + assert lex_one('-1.123e+4') == Token( + TokenKind.FLOAT, 0, 9, 1, 1, None, '-1.123e+4') + assert lex_one('-1.123e4567') == Token( + TokenKind.FLOAT, 0, 11, 1, 1, None, '-1.123e4567') + + def lex_reports_useful_number_errors(): + assert_syntax_error( + '00', "Invalid number, unexpected digit after 0: '0'.", (1, 2)) + assert_syntax_error( + '+1', "Cannot parse the unexpected character '+'.", (1, 1)) + assert_syntax_error( + '1.', 'Invalid number, expected digit but got: .', (1, 3)) + assert_syntax_error( + '1.e1', "Invalid number, expected digit but got: 'e'.", (1, 3)) + assert_syntax_error( + '.123', "Cannot parse the unexpected character '.'", (1, 1)) + assert_syntax_error( + '1.A', "Invalid number, expected digit but got: 'A'.", (1, 3)) + assert_syntax_error( + '-A', "Invalid number, expected digit but got: 'A'.", (1, 2)) + assert_syntax_error( + '1.0e', 'Invalid number, expected digit but got: .', (1, 5)) + assert_syntax_error( + '1.0eA', "Invalid number, expected digit but got: 'A'.", (1, 5)) + + # noinspection PyArgumentEqualDefault + def lexes_punctuation(): + assert lex_one('!') == Token(TokenKind.BANG, 0, 1, 1, 1, None, None) + assert lex_one('$') == Token(TokenKind.DOLLAR, 0, 1, 1, 1, None, None) + assert lex_one('(') == Token(TokenKind.PAREN_L, 0, 1, 1, 1, None, None) + assert lex_one(')') == Token(TokenKind.PAREN_R, 0, 1, 1, 1, None, None) + assert lex_one('...') == Token( + TokenKind.SPREAD, 0, 3, 1, 1, None, None) + assert lex_one(':') == Token(TokenKind.COLON, 0, 1, 1, 1, None, None) + assert lex_one('=') == Token(TokenKind.EQUALS, 0, 1, 1, 1, None, None) + assert lex_one('@') == Token(TokenKind.AT, 0, 1, 1, 1, None, None) + assert lex_one('[') == Token( + TokenKind.BRACKET_L, 0, 1, 1, 1, None, None) + assert lex_one(']') == Token( + TokenKind.BRACKET_R, 0, 1, 1, 1, None, None) + assert lex_one('{') == Token(TokenKind.BRACE_L, 0, 1, 1, 1, None, None) + assert lex_one('}') == Token(TokenKind.BRACE_R, 0, 1, 1, 1, None, None) + assert lex_one('|') == 
Token(TokenKind.PIPE, 0, 1, 1, 1, None, None)
+
+    def lex_reports_useful_unknown_character_error():
+        assert_syntax_error(
+            '..', "Cannot parse the unexpected character '.'", (1, 1))
+        assert_syntax_error(
+            '?', "Cannot parse the unexpected character '?'", (1, 1))
+        assert_syntax_error(
+            '\u203B', "Cannot parse the unexpected character '\u203B'",
+            (1, 1))
+        assert_syntax_error(
+            '\u200b', "Cannot parse the unexpected character '\\u200b'",
+            (1, 1))
+
+    # noinspection PyArgumentEqualDefault
+    def lex_reports_useful_information_for_dashes_in_names():
+        q = 'a-b'
+        lexer = Lexer(Source(q))
+        first_token = lexer.advance()
+        assert first_token == Token(TokenKind.NAME, 0, 1, 1, 1, None, 'a')
+        with raises(GraphQLSyntaxError) as exc_info:
+            lexer.advance()
+        error = exc_info.value
+        assert error.message == (
+            "Syntax Error: Invalid number, expected digit but got: 'b'.")
+        assert error.locations == [(1, 3)]
+
+    def produces_double_linked_list_of_tokens_including_comments():
+        lexer = Lexer(Source('{\n #comment\n field\n }'))
+        start_token = lexer.token
+        while True:
+            end_token = lexer.advance()
+            if end_token.kind == TokenKind.EOF:
+                break
+            assert end_token.kind != TokenKind.COMMENT
+        assert start_token.prev is None
+        assert end_token.next is None
+        tokens = []
+        tok = start_token
+        while tok:
+            assert not tokens or tok.prev == tokens[-1]
+            tokens.append(tok)
+            tok = tok.next
+        assert [tok.kind.value for tok in tokens] == [
+            '<SOF>', '{', 'Comment', 'Name', '}', '<EOF>']
diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py
new file mode 100644
index 00000000..636d99bc
--- /dev/null
+++ b/tests/language/test_parser.py
@@ -0,0 +1,435 @@
+from typing import cast
+
+from pytest import raises
+
+from graphql.pyutils import dedent
+from graphql.error import GraphQLSyntaxError
+from graphql.language import (
+    ArgumentNode, DefinitionNode, DocumentNode,
+    FieldNode, IntValueNode, ListTypeNode, ListValueNode, NameNode,
+    NamedTypeNode, NonNullTypeNode, NullValueNode, OperationDefinitionNode,
+    OperationType, SelectionSetNode, StringValueNode, ValueNode,
+    Token, parse, parse_type, parse_value, Source)
+
+# noinspection PyUnresolvedReferences
+from . import kitchen_sink  # noqa: F401
+
+
+def assert_syntax_error(text, message, location):
+    with raises(GraphQLSyntaxError) as exc_info:
+        parse(text)
+    error = exc_info.value
+    assert message in error.message
+    assert error.locations == [location]
+
+
+def describe_parser():
+
+    def asserts_that_a_source_to_parse_was_provided():
+        with raises(TypeError) as exc_info:
+            # noinspection PyArgumentList
+            assert parse()
+        msg = str(exc_info.value)
+        assert 'missing' in msg
+        assert 'source' in msg
+        with raises(TypeError) as exc_info:
+            # noinspection PyTypeChecker
+            assert parse(None)
+        msg = str(exc_info.value)
+        assert 'Must provide Source. Received: None' in msg
+        with raises(TypeError) as exc_info:
+            # noinspection PyTypeChecker
+            assert parse({})
+        msg = str(exc_info.value)
+        assert 'Must provide Source. Received: {}' in msg
+
+    def parse_provides_useful_errors():
+        with raises(GraphQLSyntaxError) as exc_info:
+            parse('{')
+        error = exc_info.value
+        assert error.message == 'Syntax Error: Expected Name, found <EOF>'
+        assert error.positions == [1]
+        assert error.locations == [(1, 2)]
+        assert str(error) == dedent("""
+            Syntax Error: Expected Name, found <EOF>
+
+            GraphQL request (1:2)
+            1: {
+                ^
+            """)
+        assert_syntax_error(
+            '\n      { ...MissingOn }\n      fragment MissingOn Type',
+            "Expected 'on', found Name 'Type'", (3, 26))
+        assert_syntax_error('{ field: {} }', 'Expected Name, found {', (1, 10))
+        assert_syntax_error(
+            'notanoperation Foo { field }',
+            "Unexpected Name 'notanoperation'", (1, 1))
+        assert_syntax_error('...', 'Unexpected ...', (1, 1))
+
+    def parse_provides_useful_error_when_using_source():
+        with raises(GraphQLSyntaxError) as exc_info:
+            parse(Source('query', 'MyQuery.graphql'))
+        error = exc_info.value
+        assert str(error) == (
+            'Syntax Error: Expected {, found <EOF>\n\n'
+            'MyQuery.graphql (1:6)\n1: query\n        ^\n')
+
+    def parses_variable_inline_values():
+        parse('{ field(complex: { a: { b: [ $var ] } }) }')
+
+    def parses_constant_default_values():
+        assert_syntax_error(
+            'query Foo($x: Complex = { a: { b: [ $var ] } }) { field }',
+            'Unexpected $', (1, 37))
+
+    def does_not_accept_fragments_named_on():
+        assert_syntax_error(
+            'fragment on on on { on }', "Unexpected Name 'on'", (1, 10))
+
+    def does_not_accept_fragments_spread_of_on():
+        assert_syntax_error('{ ...on }', 'Expected Name, found }', (1, 9))
+
+    def parses_multi_byte_characters():
+        # Note: \u0A0A could be naively interpreted as two line-feed chars.
+        doc = parse("""
+            # This comment has a \u0A0A multi-byte character.
+            { field(arg: "Has a \u0A0A multi-byte character.") }
+            """)
+        definitions = doc.definitions
+        assert isinstance(definitions, list)
+        assert len(definitions) == 1
+        selection_set = cast(
+            OperationDefinitionNode, definitions[0]).selection_set
+        selections = selection_set.selections
+        assert isinstance(selections, list)
+        assert len(selections) == 1
+        arguments = cast(FieldNode, selections[0]).arguments
+        assert isinstance(arguments, list)
+        assert len(arguments) == 1
+        value = arguments[0].value
+        assert isinstance(value, StringValueNode)
+        assert value.value == 'Has a \u0A0A multi-byte character.'
+
+    # noinspection PyShadowingNames
+    def parses_kitchen_sink(kitchen_sink):  # noqa: F811
+        parse(kitchen_sink)
+
+    def allows_non_keywords_anywhere_a_name_is_allowed():
+        non_keywords = ('on', 'fragment', 'query', 'mutation', 'subscription',
+                        'true', 'false')
+        for keyword in non_keywords:
+            # You can't define or reference a fragment named `on`.
+            fragment_name = 'a' if keyword == 'on' else keyword
+            document = f"""
+              query {keyword} {{
+                ... {fragment_name}
+                ...
on {keyword} {{ field }} + }} + fragment {fragment_name} on Type {{ + {keyword}({keyword}: ${keyword}) + @{keyword}({keyword}: {keyword}) + }} + """ + parse(document) + + def parses_anonymous_mutation_operations(): + parse(""" + mutation { + mutationField + } + """) + + def parses_anonymous_subscription_operations(): + parse(""" + subscription { + subscriptionField + } + """) + + def parses_named_mutation_operations(): + parse(""" + mutation Foo { + mutationField + } + """) + + def parses_named_subscription_operations(): + parse(""" + subscription Foo { + subscriptionField + } + """) + + def creates_ast(): + doc = parse(dedent(""" + { + node(id: 4) { + id, + name + } + } + """)) + assert isinstance(doc, DocumentNode) + assert doc.loc == (0, 41) + definitions = doc.definitions + assert isinstance(definitions, list) + assert len(definitions) == 1 + definition = cast(OperationDefinitionNode, definitions[0]) + assert isinstance(definition, DefinitionNode) + assert definition.loc == (0, 40) + assert definition.operation == OperationType.QUERY + assert definition.name is None + assert definition.variable_definitions == [] + assert definition.directives == [] + selection_set = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + assert selection_set.loc == (0, 40) + selections = selection_set.selections + assert isinstance(selections, list) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + assert field.loc == (4, 38) + assert field.alias is None + name = field.name + assert isinstance(name, NameNode) + assert name.loc == (4, 8) + assert name.value == 'node' + arguments = field.arguments + assert isinstance(arguments, list) + assert len(arguments) == 1 + argument = arguments[0] + assert isinstance(argument, ArgumentNode) + name = argument.name + assert isinstance(name, NameNode) + assert name.loc == (9, 11) + assert name.value == 'id' + value = argument.value + assert isinstance(value, ValueNode) + assert isinstance(value, IntValueNode) + assert value.loc == (13, 14) + assert value.value == '4' + assert argument.loc == (9, 14) + assert field.directives == [] + selection_set = field.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, list) + assert len(selections) == 2 + field = selections[0] + assert isinstance(field, FieldNode) + assert field.loc == (22, 24) + assert field.alias is None + name = field.name + assert isinstance(name, NameNode) + assert name.loc == (22, 24) + assert name.value == 'id' + assert field.arguments == [] + assert field.directives == [] + assert field.selection_set is None + field = selections[0] + assert isinstance(field, FieldNode) + assert field.loc == (22, 24) + assert field.alias is None + name = field.name + assert isinstance(name, NameNode) + assert name.loc == (22, 24) + assert name.value == 'id' + assert field.arguments == [] + assert field.directives == [] + assert field.selection_set is None + field = selections[1] + assert isinstance(field, FieldNode) + assert field.loc == (30, 34) + assert field.alias is None + name = field.name + assert isinstance(name, NameNode) + assert name.loc == (30, 34) + assert name.value == 'name' + assert field.arguments == [] + assert field.directives == [] + assert field.selection_set is None + + def creates_ast_from_nameless_query_without_variables(): + doc = parse(dedent(""" + query { + node { + id + } + } + """)) + assert isinstance(doc, DocumentNode) + assert doc.loc == 
(0, 30)
+        definitions = doc.definitions
+        assert isinstance(definitions, list)
+        assert len(definitions) == 1
+        definition = definitions[0]
+        assert isinstance(definition, OperationDefinitionNode)
+        assert definition.loc == (0, 29)
+        assert definition.operation == OperationType.QUERY
+        assert definition.name is None
+        assert definition.variable_definitions == []
+        assert definition.directives == []
+        selection_set = definition.selection_set
+        assert isinstance(selection_set, SelectionSetNode)
+        assert selection_set.loc == (6, 29)
+        selections = selection_set.selections
+        assert isinstance(selections, list)
+        assert len(selections) == 1
+        field = selections[0]
+        assert isinstance(field, FieldNode)
+        assert field.loc == (10, 27)
+        assert field.alias is None
+        name = field.name
+        assert isinstance(name, NameNode)
+        assert name.loc == (10, 14)
+        assert name.value == 'node'
+        assert field.arguments == []
+        assert field.directives == []
+        selection_set = field.selection_set
+        assert isinstance(selection_set, SelectionSetNode)
+        assert selection_set.loc == (15, 27)
+        selections = selection_set.selections
+        assert isinstance(selections, list)
+        assert len(selections) == 1
+        field = selections[0]
+        assert isinstance(field, FieldNode)
+        assert field.loc == (21, 23)
+        assert field.alias is None
+        name = field.name
+        assert isinstance(name, NameNode)
+        assert name.loc == (21, 23)
+        assert name.value == 'id'
+        assert field.arguments == []
+        assert field.directives == []
+        assert field.selection_set is None
+
+    def allows_parsing_without_source_location_information():
+        result = parse('{ id }', no_location=True)
+        assert result.loc is None
+
+    def experimental_allows_parsing_fragment_defined_variables():
+        document = 'fragment a($v: Boolean = false) on t { f(v: $v) }'
+        parse(document, experimental_fragment_variables=True)
+        with raises(GraphQLSyntaxError):
+            parse(document)
+
+    def contains_location_information_that_only_stringifies_start_end():
+        result = parse('{ id }')
+        assert str(result.loc) == '0:6'
+
+    def contains_references_to_source():
+        source = Source('{ id }')
+        result = parse(source)
+        assert result.loc.source is source
+
+    def contains_references_to_start_and_end_tokens():
+        result = parse('{ id }')
+        start_token = result.loc.start_token
+        assert isinstance(start_token, Token)
+        assert start_token.desc == '<SOF>'
+        end_token = result.loc.end_token
+        assert isinstance(end_token, Token)
+        assert end_token.desc == '<EOF>'
+
+
+def describe_parse_value():
+
+    def parses_null_value():
+        result = parse_value('null')
+        assert isinstance(result, NullValueNode)
+        assert result.loc == (0, 4)
+
+    def parses_list_values():
+        result = parse_value('[123 "abc"]')
+        assert isinstance(result, ListValueNode)
+        assert result.loc == (0, 11)
+        values = result.values
+        assert isinstance(values, list)
+        assert len(values) == 2
+        value = values[0]
+        assert isinstance(value, IntValueNode)
+        assert value.loc == (1, 4)
+        assert value.value == '123'
+        value = values[1]
+        assert isinstance(value, StringValueNode)
+        assert value.loc == (5, 10)
+        assert value.value == 'abc'
+
+    def parses_block_strings():
+        result = parse_value('["""long""" "short"]')
+        assert isinstance(result, ListValueNode)
+        assert result.loc == (0, 20)
+        values = result.values
+        assert isinstance(values, list)
+        assert len(values) == 2
+        value = values[0]
+        assert isinstance(value, StringValueNode)
+        assert value.loc == (1, 11)
+        assert value.value == 'long'
+        assert value.block is True
+        value = values[1]
+        assert isinstance(value, StringValueNode)
+
assert value.loc == (12, 19) + assert value.value == 'short' + assert value.block is False + + +def describe_parse_type(): + + def parses_well_known_types(): + result = parse_type('String') + assert isinstance(result, NamedTypeNode) + assert result.loc == (0, 6) + name = result.name + assert isinstance(name, NameNode) + assert name.loc == (0, 6) + assert name.value == 'String' + + def parses_custom_types(): + result = parse_type('MyType') + assert isinstance(result, NamedTypeNode) + assert result.loc == (0, 6) + name = result.name + assert isinstance(name, NameNode) + assert name.loc == (0, 6) + assert name.value == 'MyType' + + def parses_list_types(): + result = parse_type('[MyType]') + assert isinstance(result, ListTypeNode) + assert result.loc == (0, 8) + type_ = result.type + assert isinstance(type_, NamedTypeNode) + assert type_.loc == (1, 7) + name = type_.name + assert isinstance(name, NameNode) + assert name.loc == (1, 7) + assert name.value == 'MyType' + + def parses_non_null_types(): + result = parse_type('MyType!') + assert isinstance(result, NonNullTypeNode) + assert result.loc == (0, 7) + type_ = result.type + assert isinstance(type_, NamedTypeNode) + assert type_.loc == (0, 6) + name = type_.name + assert isinstance(name, NameNode) + assert name.loc == (0, 6) + assert name.value == 'MyType' + + def parses_nested_types(): + result = parse_type('[MyType!]') + assert isinstance(result, ListTypeNode) + assert result.loc == (0, 9) + type_ = result.type + assert isinstance(type_, NonNullTypeNode) + assert type_.loc == (1, 8) + type_ = type_.type + assert isinstance(type_, NamedTypeNode) + assert type_.loc == (1, 7) + name = type_.name + assert isinstance(name, NameNode) + assert name.loc == (1, 7) + assert name.value == 'MyType' diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py new file mode 100644 index 00000000..c5320e32 --- /dev/null +++ b/tests/language/test_printer.py @@ -0,0 +1,162 @@ +from copy import deepcopy + +from pytest import raises + +from graphql.pyutils import dedent +from graphql.language import FieldNode, NameNode, parse, print_ast + +# noinspection PyUnresolvedReferences +from . 
import kitchen_sink # noqa: F401 + + +def describe_printer_query_document(): + + # noinspection PyShadowingNames + def does_not_alter_ast(kitchen_sink): # noqa: F811 + ast = parse(kitchen_sink) + ast_before = deepcopy(ast) + print_ast(ast) + assert ast == ast_before + + def prints_minimal_ast(): + ast = FieldNode(name=NameNode(value='foo')) + assert print_ast(ast) == 'foo' + + def produces_helpful_error_messages(): + bad_ast = {'random': 'Data'} + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + print_ast(bad_ast) + msg = str(exc_info.value) + assert msg == "Not an AST Node: {'random': 'Data'}" + + def correctly_prints_query_operation_without_name(): + query_ast_shorthanded = parse('query { id, name }') + assert print_ast(query_ast_shorthanded) == '{\n id\n name\n}\n' + + def correctly_prints_mutation_operation_without_name(): + mutation_ast = parse('mutation { id, name }') + assert print_ast(mutation_ast) == 'mutation {\n id\n name\n}\n' + + def correctly_prints_query_operation_with_artifacts(): + query_ast_with_artifacts = parse( + 'query ($foo: TestType) @testDirective { id, name }') + assert print_ast(query_ast_with_artifacts) == dedent(""" + query ($foo: TestType) @testDirective { + id + name + } + """) + + def correctly_prints_mutation_operation_with_artifacts(): + mutation_ast_with_artifacts = parse( + 'mutation ($foo: TestType) @testDirective { id, name }') + assert print_ast(mutation_ast_with_artifacts) == dedent(""" + mutation ($foo: TestType) @testDirective { + id + name + } + """) + + +def describe_block_string(): + + def correctly_prints_single_line_block_strings_with_leading_space(): + ast_with_artifacts = parse('{ field(arg: """ space-led value""") }') + assert print_ast(ast_with_artifacts) == dedent(''' + { + field(arg: """ space-led value""") + } + ''') + + def correctly_prints_string_with_a_first_line_indentation(): + source = ''' + { + field(arg: """ + first + line + indentation + """) + } + ''' + ast_with_artifacts = parse(source) + assert print_ast(ast_with_artifacts) == dedent(source) + + def correctly_prints_single_line_with_leading_space_and_quotation(): + source = ''' + { + field(arg: """ space-led value "quoted string" + """) + } + ''' + ast_with_artifacts = parse(source) + assert print_ast(ast_with_artifacts) == dedent(source) + + def experimental_correctly_prints_fragment_defined_variables(): + source = """ + fragment Foo($a: ComplexType, $b: Boolean = false) on TestType { + id + } + """ + fragment_with_variable = parse( + source, experimental_fragment_variables=True) + assert print_ast(fragment_with_variable) == dedent(source) + + # noinspection PyShadowingNames + def prints_kitchen_sink(kitchen_sink): # noqa: F811 + ast = parse(kitchen_sink) + printed = print_ast(ast) + assert printed == dedent(r''' + query queryName($foo: ComplexType, $site: Site = MOBILE) { + whoever123is: node(id: [123, 456]) { + id + ... on User @defer { + field2 { + id + alias: field1(first: 10, after: $foo) @include(if: $foo) { + id + ...frag + } + } + } + ... @skip(unless: $foo) { + id + } + ... 
{ + id + } + } + } + + mutation likeStory { + like(story: 123) @defer { + story { + id + } + } + } + + subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) { + storyLikeSubscribe(input: $input) { + story { + likers { + count + } + likeSentence { + text + } + } + } + } + + fragment frag on Friend { + foo(size: $size, bar: $b, obj: {key: "value", block: """ + block string uses \""" + """}) + } + + { + unnamed(truthy: true, falsey: false, nullish: null) + query + } + ''') # noqa diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py new file mode 100644 index 00000000..d60c180a --- /dev/null +++ b/tests/language/test_schema_parser.py @@ -0,0 +1,447 @@ +from textwrap import dedent + +from pytest import raises + +from graphql.error import GraphQLSyntaxError +from graphql.language import ( + BooleanValueNode, DocumentNode, EnumTypeDefinitionNode, + EnumValueDefinitionNode, FieldDefinitionNode, + InputObjectTypeDefinitionNode, InputValueDefinitionNode, + InterfaceTypeDefinitionNode, ListTypeNode, NameNode, NamedTypeNode, + NonNullTypeNode, ObjectTypeDefinitionNode, + ObjectTypeExtensionNode, OperationType, OperationTypeDefinitionNode, + ScalarTypeDefinitionNode, SchemaExtensionNode, DirectiveNode, + StringValueNode, UnionTypeDefinitionNode, parse) + + +def assert_syntax_error(text, message, location): + with raises(GraphQLSyntaxError) as exc_info: + parse(text) + error = exc_info.value + assert message in error.message + assert error.locations == [location] + + +def assert_definitions(body, loc, num=1): + doc = parse(body) + assert isinstance(doc, DocumentNode) + assert doc.loc == loc + definitions = doc.definitions + assert isinstance(definitions, list) + assert len(definitions) == num + return definitions[0] if num == 1 else definitions + + +def type_node(name, loc): + return NamedTypeNode(name=name_node(name, loc), loc=loc) + + +def name_node(name, loc): + return NameNode(value=name, loc=loc) + + +def field_node(name, type_, loc): + return field_node_with_args(name, type_, [], loc) + + +def field_node_with_args(name, type_, args, loc): + return FieldDefinitionNode( + name=name, arguments=args, type=type_, directives=[], loc=loc, + description=None) + + +def non_null_type(type_, loc): + return NonNullTypeNode(type=type_, loc=loc) + + +def enum_value_node(name, loc): + return EnumValueDefinitionNode( + name=name_node(name, loc), directives=[], loc=loc, + description=None) + + +def input_value_node(name, type_, default_value, loc): + return InputValueDefinitionNode( + name=name, type=type_, default_value=default_value, directives=[], + loc=loc, description=None) + + +def boolean_value_node(value, loc): + return BooleanValueNode(value=value, loc=loc) + + +def list_type_node(type_, loc): + return ListTypeNode(type=type_, loc=loc) + + +def schema_extension_node(directives, operation_types, loc): + return SchemaExtensionNode( + directives=directives, operation_types=operation_types, loc=loc) + + +def operation_type_definition(operation, type_, loc): + return OperationTypeDefinitionNode( + operation=operation, type=type_, loc=loc) + + +def directive_node(name, arguments, loc): + return DirectiveNode(name=name, arguments=arguments, loc=loc) + + +def describe_schema_parser(): + + def simple_type(): + body = '\ntype Hello {\n world: String\n}' + definition = assert_definitions(body, (0, 31)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.description is None + assert 
definition.interfaces == [] + assert definition.directives == [] + assert definition.fields == [field_node( + name_node('world', (16, 21)), + type_node('String', (23, 29)), (16, 29))] + assert definition.loc == (1, 31) + + def parses_type_with_description_string(): + body = '\n"Description"\ntype Hello {\n world: String\n}' + definition = assert_definitions(body, (0, 45)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (20, 25)) + description = definition.description + assert isinstance(description, StringValueNode) + assert description.value == 'Description' + assert description.block is False + assert description.loc == (1, 14) + + def parses_type_with_description_multi_line_string(): + body = dedent(''' + """ + Description + """ + # Even with comments between them + type Hello { + world: String + }''') + definition = assert_definitions(body, (0, 85)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (60, 65)) + description = definition.description + assert isinstance(description, StringValueNode) + assert description.value == 'Description' + assert description.block is True + assert description.loc == (1, 20) + + def simple_extension(): + body = '\nextend type Hello {\n world: String\n}\n' + extension = assert_definitions(body, (0, 39)) + assert isinstance(extension, ObjectTypeExtensionNode) + assert extension.name == name_node('Hello', (13, 18)) + assert extension.interfaces == [] + assert extension.directives == [] + assert extension.fields == [field_node( + name_node('world', (23, 28)), + type_node('String', (30, 36)), (23, 36))] + assert extension.loc == (1, 38) + + def extension_without_fields(): + body = 'extend type Hello implements Greeting' + extension = assert_definitions(body, (0, 37)) + assert isinstance(extension, ObjectTypeExtensionNode) + assert extension.name == name_node('Hello', (12, 17)) + assert extension.interfaces == [type_node('Greeting', (29, 37))] + assert extension.directives == [] + assert extension.fields == [] + assert extension.loc == (0, 37) + + def extension_without_fields_followed_by_extension(): + body = ('\n extend type Hello implements Greeting\n\n' + ' extend type Hello implements SecondGreeting\n ') + extensions = assert_definitions(body, (0, 100), 2) + extension = extensions[0] + assert isinstance(extension, ObjectTypeExtensionNode) + assert extension.name == name_node('Hello', (19, 24)) + assert extension.interfaces == [type_node('Greeting', (36, 44))] + assert extension.directives == [] + assert extension.fields == [] + assert extension.loc == (7, 44) + extension = extensions[1] + assert isinstance(extension, ObjectTypeExtensionNode) + assert extension.name == name_node('Hello', (64, 69)) + assert extension.interfaces == [type_node('SecondGreeting', (81, 95))] + assert extension.directives == [] + assert extension.fields == [] + assert extension.loc == (52, 95) + + def extension_without_anything_throws(): + assert_syntax_error('extend type Hello', 'Unexpected ', (1, 18)) + + def extension_do_not_include_descriptions(): + assert_syntax_error(""" + "Description" + extend type Hello { + world: String + }""", "Unexpected Name 'extend'", (3, 13)) + assert_syntax_error(""" + extend "Description" type Hello { + world: String + }""", "Unexpected String 'Description'", (2, 18)) + + def schema_extension(): + body = """ + extend schema { + mutation: Mutation + }""" + doc = parse(body) + assert isinstance(doc, DocumentNode) + assert doc.loc == (0, 
75) + assert doc.definitions == [schema_extension_node( + [], [operation_type_definition(OperationType.MUTATION, type_node( + 'Mutation', (53, 61)), (43, 61))], (13, 75))] + + def schema_extension_with_only_directives(): + body = 'extend schema @directive' + doc = parse(body) + assert isinstance(doc, DocumentNode) + assert doc.loc == (0, 24) + assert doc.definitions == [schema_extension_node( + [directive_node(name_node('directive', (15, 24)), [], (14, 24))], + [], (0, 24))] + + def schema_extension_without_anything_throws(): + assert_syntax_error('extend schema', 'Unexpected ', (1, 14)) + + def simple_non_null_type(): + body = '\ntype Hello {\n world: String!\n}' + definition = assert_definitions(body, (0, 32)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.description is None + assert definition.interfaces == [] + assert definition.directives == [] + assert definition.fields == [field_node( + name_node('world', (16, 21)), + non_null_type(type_node('String', (23, 29)), (23, 30)), (16, 30))] + assert definition.loc == (1, 32) + + def simple_type_inheriting_interface(): + body = 'type Hello implements World { field: String }' + definition = assert_definitions(body, (0, 45)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (5, 10)) + assert definition.description is None + assert definition.interfaces == [type_node('World', (22, 27))] + assert definition.directives == [] + assert definition.fields == [field_node( + name_node('field', (30, 35)), + type_node('String', (37, 43)), (30, 43))] + assert definition.loc == (0, 45) + + def simple_type_inheriting_multiple_interfaces(): + body = 'type Hello implements Wo & rld { field: String }' + definition = assert_definitions(body, (0, 48)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (5, 10)) + assert definition.description is None + assert definition.interfaces == [ + type_node('Wo', (22, 24)), type_node('rld', (27, 30))] + assert definition.directives == [] + assert definition.fields == [field_node( + name_node('field', (33, 38)), + type_node('String', (40, 46)), (33, 46))] + assert definition.loc == (0, 48) + + def simple_type_inheriting_multiple_interfaces_with_leading_ampersand(): + body = 'type Hello implements & Wo & rld { field: String }' + definition = assert_definitions(body, (0, 50)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (5, 10)) + assert definition.description is None + assert definition.interfaces == [ + type_node('Wo', (24, 26)), type_node('rld', (29, 32))] + assert definition.directives == [] + assert definition.fields == [field_node( + name_node('field', (35, 40)), + type_node('String', (42, 48)), (35, 48))] + assert definition.loc == (0, 50) + + def single_value_enum(): + body = 'enum Hello { WORLD }' + definition = assert_definitions(body, (0, 20)) + assert isinstance(definition, EnumTypeDefinitionNode) + assert definition.name == name_node('Hello', (5, 10)) + assert definition.description is None + assert definition.directives == [] + assert definition.values == [enum_value_node('WORLD', (13, 18))] + assert definition.loc == (0, 20) + + def double_value_enum(): + body = 'enum Hello { WO, RLD }' + definition = assert_definitions(body, (0, 22)) + assert isinstance(definition, EnumTypeDefinitionNode) + assert definition.name == name_node('Hello', (5, 10)) + assert 
definition.description is None + assert definition.directives == [] + assert definition.values == [ + enum_value_node('WO', (13, 15)), + enum_value_node('RLD', (17, 20))] + assert definition.loc == (0, 22) + + def simple_interface(): + body = '\ninterface Hello {\n world: String\n}' + definition = assert_definitions(body, (0, 36)) + assert isinstance(definition, InterfaceTypeDefinitionNode) + assert definition.name == name_node('Hello', (11, 16)) + assert definition.description is None + assert definition.directives == [] + assert definition.fields == [field_node( + name_node('world', (21, 26)), + type_node('String', (28, 34)), (21, 34))] + assert definition.loc == (1, 36) + + def simple_field_with_arg(): + body = '\ntype Hello {\n world(flag: Boolean): String\n}' + definition = assert_definitions(body, (0, 46)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.description is None + assert definition.interfaces == [] + assert definition.directives == [] + assert definition.fields == [field_node_with_args( + name_node('world', (16, 21)), + type_node('String', (38, 44)), [input_value_node( + name_node('flag', (22, 26)), + type_node('Boolean', (28, 35)), None, (22, 35))], (16, 44))] + assert definition.loc == (1, 46) + + def simple_field_with_arg_with_default_value(): + body = '\ntype Hello {\n world(flag: Boolean = true): String\n}' + definition = assert_definitions(body, (0, 53)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.description is None + assert definition.interfaces == [] + assert definition.directives == [] + assert definition.fields == [field_node_with_args( + name_node('world', (16, 21)), + type_node('String', (45, 51)), [input_value_node( + name_node('flag', (22, 26)), + type_node('Boolean', (28, 35)), + boolean_value_node(True, (38, 42)), (22, 42))], (16, 51))] + assert definition.loc == (1, 53) + + def simple_field_with_list_arg(): + body = '\ntype Hello {\n world(things: [String]): String\n}' + definition = assert_definitions(body, (0, 49)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.description is None + assert definition.interfaces == [] + assert definition.directives == [] + assert definition.fields == [field_node_with_args( + name_node('world', (16, 21)), + type_node('String', (41, 47)), [input_value_node( + name_node('things', (22, 28)), + list_type_node(type_node('String', (31, 37)), (30, 38)), + None, (22, 38))], (16, 47))] + assert definition.loc == (1, 49) + + def simple_field_with_two_args(): + body = ('\ntype Hello {\n' + ' world(argOne: Boolean, argTwo: Int): String\n}') + definition = assert_definitions(body, (0, 61)) + assert isinstance(definition, ObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.description is None + assert definition.interfaces == [] + assert definition.directives == [] + assert definition.fields == [field_node_with_args( + name_node('world', (16, 21)), + type_node('String', (53, 59)), [ + input_value_node( + name_node('argOne', (22, 28)), + type_node('Boolean', (30, 37)), None, (22, 37)), + input_value_node( + name_node('argTwo', (39, 45)), + type_node('Int', (47, 50)), None, (39, 50))], (16, 59))] + assert definition.loc == (1, 61) + + def simple_union(): + body = 'union Hello = World' + definition = assert_definitions(body, (0, 
19)) + assert isinstance(definition, UnionTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.description is None + assert definition.directives == [] + assert definition.types == [type_node('World', (14, 19))] + assert definition.loc == (0, 19) + + def union_with_two_types(): + body = 'union Hello = Wo | Rld' + definition = assert_definitions(body, (0, 22)) + assert isinstance(definition, UnionTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.description is None + assert definition.directives == [] + assert definition.types == [ + type_node('Wo', (14, 16)), type_node('Rld', (19, 22))] + assert definition.loc == (0, 22) + + def union_with_two_types_and_leading_pipe(): + body = 'union Hello = | Wo | Rld' + definition = assert_definitions(body, (0, 24)) + assert isinstance(definition, UnionTypeDefinitionNode) + assert definition.name == name_node('Hello', (6, 11)) + assert definition.directives == [] + assert definition.types == [ + type_node('Wo', (16, 18)), type_node('Rld', (21, 24))] + assert definition.loc == (0, 24) + + def union_fails_with_no_types(): + assert_syntax_error('union Hello = |', + 'Expected Name, found ', (1, 16)) + + def union_fails_with_leading_double_pipe(): + assert_syntax_error('union Hello = || Wo | Rld', + 'Expected Name, found |', (1, 16)) + + def union_fails_with_trailing_pipe(): + assert_syntax_error('union Hello = | Wo | Rld |', + 'Expected Name, found ', (1, 27)) + + def scalar(): + body = 'scalar Hello' + definition = assert_definitions(body, (0, 12)) + assert isinstance(definition, ScalarTypeDefinitionNode) + assert definition.name == name_node('Hello', (7, 12)) + assert definition.description is None + assert definition.directives == [] + assert definition.loc == (0, 12) + + def simple_input_object(): + body = '\ninput Hello {\n world: String\n}' + definition = assert_definitions(body, (0, 32)) + assert isinstance(definition, InputObjectTypeDefinitionNode) + assert definition.name == name_node('Hello', (7, 12)) + assert definition.description is None + assert definition.directives == [] + assert definition.fields == [input_value_node( + name_node('world', (17, 22)), + type_node('String', (24, 30)), None, (17, 30))] + assert definition.loc == (1, 32) + + def simple_input_object_with_args_should_fail(): + assert_syntax_error('\ninput Hello {\n world(foo : Int): String\n}', + 'Expected :, found (', (3, 8)) + + def directive_with_incorrect_locations(): + assert_syntax_error('\ndirective @foo on FIELD | INCORRECT_LOCATION', + "Unexpected Name 'INCORRECT_LOCATION'", (2, 27)) + + def disallow_legacy_sdl_empty_fields_supports_type_with_empty_fields(): + assert_syntax_error('type Hello { }', + 'Syntax Error: Expected Name, found }', (1, 14)) + + def disallow_legacy_sdl_implements_interfaces(): + assert_syntax_error('type Hello implements Wo rld { field: String }', + "Syntax Error: Unexpected Name 'rld'", (1, 26)) diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py new file mode 100644 index 00000000..fce3a9ea --- /dev/null +++ b/tests/language/test_schema_printer.py @@ -0,0 +1,160 @@ +from copy import deepcopy + +from pytest import raises + +from graphql.language import ( + ScalarTypeDefinitionNode, NameNode, print_ast, parse) +from graphql.pyutils import dedent + +# noinspection PyUnresolvedReferences +from . 
import schema_kitchen_sink as kitchen_sink # noqa: F401 + + +def describe_printer_sdl_document(): + + def prints_minimal_ast(): + node = ScalarTypeDefinitionNode(name=NameNode(value='foo')) + assert print_ast(node) == 'scalar foo' + + def produces_helpful_error_messages(): + bad_ast1 = {'random': 'Data'} + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + print_ast(bad_ast1) + msg = str(exc_info.value) + assert msg == "Not an AST Node: {'random': 'Data'}" + + # noinspection PyShadowingNames + def does_not_alter_ast(kitchen_sink): # noqa: F811 + ast = parse(kitchen_sink) + ast_copy = deepcopy(ast) + print_ast(ast) + assert ast == ast_copy + + # noinspection PyShadowingNames + def prints_kitchen_sink(kitchen_sink): # noqa: F811 + ast = parse(kitchen_sink) + printed = print_ast(ast) + + assert printed == dedent(''' + schema { + query: QueryType + mutation: MutationType + } + + """ + This is a description + of the `Foo` type. + """ + type Foo implements Bar & Baz { + one: Type + """ + This is a description of the `two` field. + """ + two( + """ + This is a description of the `argument` argument. + """ + argument: InputType! + ): Type + three(argument: InputType, other: String): Int + four(argument: String = "string"): String + five(argument: [String] = ["string", "string"]): String + six(argument: InputType = {key: "value"}): Type + seven(argument: Int = null): Type + } + + type AnnotatedObject @onObject(arg: "value") { + annotatedField(arg: Type = "default" @onArg): Type @onField + } + + type UndefinedType + + extend type Foo { + seven(argument: [String]): Type + } + + extend type Foo @onType + + interface Bar { + one: Type + four(argument: String = "string"): String + } + + interface AnnotatedInterface @onInterface { + annotatedField(arg: Type @onArg): Type @onField + } + + interface UndefinedInterface + + extend interface Bar { + two(argument: InputType!): Type + } + + extend interface Bar @onInterface + + union Feed = Story | Article | Advert + + union AnnotatedUnion @onUnion = A | B + + union AnnotatedUnionTwo @onUnion = A | B + + union UndefinedUnion + + extend union Feed = Photo | Video + + extend union Feed @onUnion + + scalar CustomScalar + + scalar AnnotatedScalar @onScalar + + extend scalar CustomScalar @onScalar + + enum Site { + DESKTOP + MOBILE + } + + enum AnnotatedEnum @onEnum { + ANNOTATED_VALUE @onEnumValue + OTHER_VALUE + } + + enum UndefinedEnum + + extend enum Site { + VR + } + + extend enum Site @onEnum + + input InputType { + key: String! + answer: Int = 42 + } + + input AnnotatedInput @onInputObject { + annotatedField: Type @onField + } + + input UndefinedInput + + extend input InputType { + other: Float = 1.23e4 + } + + extend input InputType @onInputObject + + directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + + directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + + directive @include2(if: Boolean!) 
on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + + extend schema @onSchema + + extend schema @onSchema { + subscription: SubscriptionType + } + ''') # noqa diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py new file mode 100644 index 00000000..be5407cc --- /dev/null +++ b/tests/language/test_visitor.py @@ -0,0 +1,1348 @@ +from copy import copy + +from pytest import fail + +from graphql.language import ( + Node, FieldNode, NameNode, SelectionSetNode, parse, print_ast, + visit, BREAK, REMOVE, SKIP, ParallelVisitor, TypeInfoVisitor, Visitor) +from graphql.type import get_named_type, is_composite_type +from graphql.utilities import TypeInfo + +from ..validation.harness import test_schema +# noinspection PyUnresolvedReferences +from . import kitchen_sink # noqa: F401 + + +def get_node_by_path(ast, path): + result = ast + for key in path: + if isinstance(key, int): + assert isinstance(result, list) + try: + result = result[key] + except IndexError: + fail(f'invalid index {key} in node list {result}') + elif isinstance(key, str): + assert isinstance(result, Node) + try: + result = getattr(result, key) + except AttributeError: + fail(f'invalid key {key} in node {result}') + else: + fail(f'invalid key {key!r} in path {path}') + return result + + +def check_visitor_fn_args( + ast, node, key, parent, path, ancestors, is_edited=False): + assert isinstance(node, Node) + + is_root = key is None + if is_root: + if not is_edited: + assert node is ast + assert parent is None + assert path == [] + assert ancestors == [] + return + + assert isinstance(key, (int, str)) + assert get_node_by_path(parent, [key]) is not None + assert isinstance(path, list) + assert path[-1] == key + assert isinstance(ancestors, list) + assert len(ancestors) == len(path) - 1 + + if not is_edited: + assert get_node_by_path(parent, [key]) is node + assert get_node_by_path(ast, path) is node + for i, ancestor in enumerate(ancestors): + ancestor_path = path[:i] + assert ancestor == get_node_by_path(ast, ancestor_path) + + +def describe_visitor(): + + def validates_path_argument(): + ast = parse('{ a }', no_location=True) + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + visited.append(['enter', *args[3]]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + visited.append(['leave', *args[3]]) + + visit(ast, TestVisitor()) + assert visited == [ + ['enter'], + ['enter', 'definitions', 0], + ['enter', 'definitions', 0, 'selection_set'], + ['enter', 'definitions', 0, 'selection_set', 'selections', 0], + ['enter', + 'definitions', 0, 'selection_set', 'selections', 0, 'name'], + ['leave', + 'definitions', 0, 'selection_set', 'selections', 0, 'name'], + ['leave', 'definitions', 0, 'selection_set', 'selections', 0], + ['leave', 'definitions', 0, 'selection_set'], + ['leave', 'definitions', 0], + ['leave']] + + def validates_ancestors_argument(): + ast = parse('{ a }', no_location=True) + visited_nodes = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, node, key, parent, path, ancestors): + in_array = isinstance(key, int) + if in_array: + visited_nodes.append(parent) + visited_nodes.append(node) + expected_ancestors = visited_nodes[0:-2] + assert ancestors == expected_ancestors + + def leave(self, node, key, parent, path, ancestors): + expected_ancestors = visited_nodes[0:-2] + assert ancestors == expected_ancestors + in_array = isinstance(key, int) + if in_array: + 
visited_nodes.pop() + visited_nodes.pop() + + visit(ast, TestVisitor()) + + def allows_editing_a_node_both_on_enter_and_on_leave(): + ast = parse('{ a, b, c { a, b, c } }', no_location=True) + visited = [] + + class TestVisitor(Visitor): + selection_set = None + + def enter_operation_definition(self, *args): + check_visitor_fn_args(ast, *args) + node = copy(args[0]) + assert len(node.selection_set.selections) == 3 + self.selection_set = node.selection_set + node.selection_set = SelectionSetNode(selections=[]) + visited.append('enter') + return node + + def leave_operation_definition(self, *args): + check_visitor_fn_args(ast, *args, is_edited=True) + node = copy(args[0]) + assert not node.selection_set.selections + node.selection_set = self.selection_set + visited.append('leave') + return node + + edited_ast = visit(ast, TestVisitor()) + assert edited_ast == ast + assert visited == ['enter', 'leave'] + + def allows_for_editing_on_enter(): + ast = parse('{ a, b, c { a, b, c } }', no_location=True) + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + if isinstance(node, FieldNode) and node.name.value == 'b': + return REMOVE + + edited_ast = visit(ast, TestVisitor()) + assert ast == parse('{ a, b, c { a, b, c } }', no_location=True) + assert edited_ast == parse('{ a, c { a, c } }', no_location=True) + + def allows_for_editing_on_leave(): + ast = parse('{ a, b, c { a, b, c } }', no_location=True) + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def leave(self, *args): + check_visitor_fn_args(ast, *args, is_edited=True) + node = args[0] + if isinstance(node, FieldNode) and node.name.value == 'b': + return REMOVE + + edited_ast = visit(ast, TestVisitor()) + assert ast == parse('{ a, b, c { a, b, c } }', no_location=True) + assert edited_ast == parse('{ a, c { a, c } }', no_location=True) + + def visits_edited_node(): + ast = parse('{ a { x } }', no_location=True) + added_field = FieldNode(name=NameNode(value='__typename')) + + class TestVisitor(Visitor): + did_visit_added_field = False + + def enter(self, *args): + check_visitor_fn_args(ast, *args, is_edited=True) + node = args[0] + if isinstance(node, FieldNode) and node.name.value == 'a': + node = copy(node) + # noinspection PyTypeChecker + node.selection_set.selections = [ + added_field] + node.selection_set.selections + return node + if node == added_field: + self.did_visit_added_field = True + + visitor = TestVisitor() + visit(ast, visitor) + assert visitor.did_visit_added_field + + def allows_skipping_a_sub_tree(): + ast = parse('{ a, b { x }, c }', no_location=True) + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + if kind == 'field' and node.name.value == 'b': + return SKIP + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + + visit(ast, TestVisitor()) + assert visited == [ + ['enter', 'document', None], + ['enter', 'operation_definition', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'c'], + ['leave', 
'name', 'c'], + ['leave', 'field', None], + ['leave', 'selection_set', None], + ['leave', 'operation_definition', None], + ['leave', 'document', None]] + + def allows_early_exit_while_visiting(): + ast = parse('{ a, b { x }, c }', no_location=True) + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + if kind == 'name' and node.value == 'x': + return BREAK + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + + visit(ast, TestVisitor()) + assert visited == [ + ['enter', 'document', None], + ['enter', 'operation_definition', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'b'], + ['leave', 'name', 'b'], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'x']] + + def allows_early_exit_while_leaving(): + ast = parse('{ a, b { x }, c }', no_location=True) + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + if kind == 'name' and node.value == 'x': + return BREAK + + visit(ast, TestVisitor()) + assert visited == [ + ['enter', 'document', None], + ['enter', 'operation_definition', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'b'], + ['leave', 'name', 'b'], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'x'], + ['leave', 'name', 'x']] + + def allows_a_named_functions_visitor_api(): + ast = parse('{ a, b { x }, c }', no_location=True) + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter_name(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + + def enter_selection_set(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + + def leave_selection_set(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + + visit(ast, TestVisitor()) + assert visited == [ + ['enter', 'selection_set', None], + ['enter', 'name', 'a'], + ['enter', 'name', 'b'], + ['enter', 'selection_set', None], + ['enter', 'name', 'x'], + ['leave', 'selection_set', None], + ['enter', 'name', 'c'], + ['leave', 'selection_set', None]] + + def experimental_visits_variables_defined_in_fragments(): + ast = parse('fragment a($v: Boolean = false) on t { f }', + no_location=True, experimental_fragment_variables=True) + visited = [] + + # noinspection PyMethodMayBeStatic + class 
TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + + visit(ast, TestVisitor()) + assert visited == [ + ['enter', 'document', None], + ['enter', 'fragment_definition', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['enter', 'variable_definition', None], + ['enter', 'variable', None], + ['enter', 'name', 'v'], + ['leave', 'name', 'v'], + ['leave', 'variable', None], + ['enter', 'named_type', None], + ['enter', 'name', 'Boolean'], + ['leave', 'name', 'Boolean'], + ['leave', 'named_type', None], + ['enter', 'boolean_value', False], + ['leave', 'boolean_value', False], + ['leave', 'variable_definition', None], + ['enter', 'named_type', None], + ['enter', 'name', 't'], + ['leave', 'name', 't'], + ['leave', 'named_type', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'f'], + ['leave', 'name', 'f'], + ['leave', 'field', None], + ['leave', 'selection_set', None], + ['leave', 'fragment_definition', None], + ['leave', 'document', None]] + + # noinspection PyShadowingNames + def visits_kitchen_sink(kitchen_sink): # noqa: F811 + ast = parse(kitchen_sink) + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node, key, parent = args[:3] + parent_kind = parent.kind if isinstance(parent, Node) else None + visited.append(['enter', node.kind, key, parent_kind]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node, key, parent = args[:3] + parent_kind = parent.kind if isinstance(parent, Node) else None + visited.append(['leave', node.kind, key, parent_kind]) + + visit(ast, TestVisitor()) + assert visited == [ + ['enter', 'document', None, None], + ['enter', 'operation_definition', 0, None], + ['enter', 'name', 'name', 'operation_definition'], + ['leave', 'name', 'name', 'operation_definition'], + ['enter', 'variable_definition', 0, None], + ['enter', 'variable', 'variable', 'variable_definition'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 'variable'], + ['leave', 'variable', 'variable', 'variable_definition'], + ['enter', 'named_type', 'type', 'variable_definition'], + ['enter', 'name', 'name', 'named_type'], + ['leave', 'name', 'name', 'named_type'], + ['leave', 'named_type', 'type', 'variable_definition'], + ['leave', 'variable_definition', 0, None], + ['enter', 'variable_definition', 1, None], + ['enter', 'variable', 'variable', 'variable_definition'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 'variable'], + ['leave', 'variable', 'variable', 'variable_definition'], + ['enter', 'named_type', 'type', 'variable_definition'], + ['enter', 'name', 'name', 'named_type'], + ['leave', 'name', 'name', 'named_type'], + ['leave', 'named_type', 'type', 'variable_definition'], + ['enter', 'enum_value', 'default_value', 'variable_definition'], + ['leave', 'enum_value', 'default_value', 'variable_definition'], + ['leave', 'variable_definition', 1, None], + ['enter', 'selection_set', 'selection_set', + 'operation_definition'], + ['enter', 'field', 0, None], + ['enter', 'name', 'alias', 'field'], + ['leave', 'name', 'alias', 'field'], + ['enter', 'name', 'name', 'field'], + 
['leave', 'name', 'name', 'field'], + ['enter', 'argument', 0, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'list_value', 'value', 'argument'], + ['enter', 'int_value', 0, None], + ['leave', 'int_value', 0, None], + ['enter', 'int_value', 1, None], + ['leave', 'int_value', 1, None], + ['leave', 'list_value', 'value', 'argument'], + ['leave', 'argument', 0, None], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 0, None], + ['enter', 'inline_fragment', 1, None], + ['enter', 'named_type', 'type_condition', 'inline_fragment'], + ['enter', 'name', 'name', 'named_type'], + ['leave', 'name', 'name', 'named_type'], + ['leave', 'named_type', 'type_condition', 'inline_fragment'], + ['enter', 'directive', 0, None], + ['enter', 'name', 'name', 'directive'], + ['leave', 'name', 'name', 'directive'], + ['leave', 'directive', 0, None], + ['enter', 'selection_set', 'selection_set', 'inline_fragment'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 0, None], + ['enter', 'field', 1, None], + ['enter', 'name', 'alias', 'field'], + ['leave', 'name', 'alias', 'field'], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'argument', 0, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'int_value', 'value', 'argument'], + ['leave', 'int_value', 'value', 'argument'], + ['leave', 'argument', 0, None], + ['enter', 'argument', 1, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'variable', 'value', 'argument'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 'variable'], + ['leave', 'variable', 'value', 'argument'], + ['leave', 'argument', 1, None], + ['enter', 'directive', 0, None], + ['enter', 'name', 'name', 'directive'], + ['leave', 'name', 'name', 'directive'], + ['enter', 'argument', 0, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'variable', 'value', 'argument'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 'variable'], + ['leave', 'variable', 'value', 'argument'], + ['leave', 'argument', 0, None], + ['leave', 'directive', 0, None], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 0, None], + ['enter', 'fragment_spread', 1, None], + ['enter', 'name', 'name', 'fragment_spread'], + ['leave', 'name', 'name', 'fragment_spread'], + ['leave', 'fragment_spread', 1, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 1, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', 'inline_fragment'], + ['leave', 'inline_fragment', 1, None], + ['enter', 'inline_fragment', 2, None], + ['enter', 'directive', 0, None], + ['enter', 'name', 'name', 'directive'], + ['leave', 'name', 'name', 'directive'], + ['enter', 'argument', 0, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + 
['enter', 'variable', 'value', 'argument'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 'variable'], + ['leave', 'variable', 'value', 'argument'], + ['leave', 'argument', 0, None], + ['leave', 'directive', 0, None], + ['enter', 'selection_set', 'selection_set', 'inline_fragment'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', 'inline_fragment'], + ['leave', 'inline_fragment', 2, None], + ['enter', 'inline_fragment', 3, None], + ['enter', 'selection_set', 'selection_set', 'inline_fragment'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', 'inline_fragment'], + ['leave', 'inline_fragment', 3, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', + 'operation_definition'], + ['leave', 'operation_definition', 0, None], + ['enter', 'operation_definition', 1, None], + ['enter', 'name', 'name', 'operation_definition'], + ['leave', 'name', 'name', 'operation_definition'], + ['enter', 'selection_set', 'selection_set', + 'operation_definition'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'argument', 0, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'int_value', 'value', 'argument'], + ['leave', 'int_value', 'value', 'argument'], + ['leave', 'argument', 0, None], + ['enter', 'directive', 0, None], + ['enter', 'name', 'name', 'directive'], + ['leave', 'name', 'name', 'directive'], + ['leave', 'directive', 0, None], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', + 'operation_definition'], + ['leave', 'operation_definition', 1, None], + ['enter', 'operation_definition', 2, None], + ['enter', 'name', 'name', 'operation_definition'], + ['leave', 'name', 'name', 'operation_definition'], + ['enter', 'variable_definition', 0, None], + ['enter', 'variable', 'variable', 'variable_definition'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 'variable'], + ['leave', 'variable', 'variable', 'variable_definition'], + ['enter', 'named_type', 'type', 'variable_definition'], + ['enter', 'name', 'name', 'named_type'], + ['leave', 'name', 'name', 'named_type'], + ['leave', 'named_type', 'type', 'variable_definition'], + ['leave', 'variable_definition', 0, None], + ['enter', 'selection_set', 'selection_set', + 'operation_definition'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'argument', 0, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'variable', 'value', 'argument'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 
'variable'], + ['leave', 'variable', 'value', 'argument'], + ['leave', 'argument', 0, None], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 0, None], + ['enter', 'field', 1, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'selection_set', 'selection_set', 'field'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 1, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', 'field'], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', + 'operation_definition'], + ['leave', 'operation_definition', 2, None], + ['enter', 'fragment_definition', 3, None], + ['enter', 'name', 'name', 'fragment_definition'], + ['leave', 'name', 'name', 'fragment_definition'], + ['enter', 'named_type', 'type_condition', + 'fragment_definition'], + ['enter', 'name', 'name', 'named_type'], + ['leave', 'name', 'name', 'named_type'], + ['leave', 'named_type', 'type_condition', + 'fragment_definition'], + ['enter', 'selection_set', 'selection_set', + 'fragment_definition'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'argument', 0, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'variable', 'value', 'argument'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 'variable'], + ['leave', 'variable', 'value', 'argument'], + ['leave', 'argument', 0, None], + ['enter', 'argument', 1, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'variable', 'value', 'argument'], + ['enter', 'name', 'name', 'variable'], + ['leave', 'name', 'name', 'variable'], + ['leave', 'variable', 'value', 'argument'], + ['leave', 'argument', 1, None], + ['enter', 'argument', 2, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'object_value', 'value', 'argument'], + ['enter', 'object_field', 0, None], + ['enter', 'name', 'name', 'object_field'], + ['leave', 'name', 'name', 'object_field'], + ['enter', 'string_value', 'value', 'object_field'], + ['leave', 'string_value', 'value', 'object_field'], + ['leave', 'object_field', 0, None], + ['enter', 'object_field', 1, None], + ['enter', 'name', 'name', 'object_field'], + ['leave', 'name', 'name', 'object_field'], + ['enter', 'string_value', 'value', 'object_field'], + ['leave', 'string_value', 'value', 'object_field'], + ['leave', 'object_field', 1, None], + ['leave', 'object_value', 'value', 'argument'], + ['leave', 'argument', 2, None], + ['leave', 'field', 0, None], + ['leave', 'selection_set', 'selection_set', + 'fragment_definition'], + ['leave', 'fragment_definition', 3, None], + ['enter', 
'operation_definition', 4, None], + ['enter', 'selection_set', 'selection_set', + 'operation_definition'], + ['enter', 'field', 0, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['enter', 'argument', 0, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'boolean_value', 'value', 'argument'], + ['leave', 'boolean_value', 'value', 'argument'], + ['leave', 'argument', 0, None], + ['enter', 'argument', 1, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'boolean_value', 'value', 'argument'], + ['leave', 'boolean_value', 'value', 'argument'], + ['leave', 'argument', 1, None], + ['enter', 'argument', 2, None], + ['enter', 'name', 'name', 'argument'], + ['leave', 'name', 'name', 'argument'], + ['enter', 'null_value', 'value', 'argument'], + ['leave', 'null_value', 'value', 'argument'], + ['leave', 'argument', 2, None], + ['leave', 'field', 0, None], + ['enter', 'field', 1, None], + ['enter', 'name', 'name', 'field'], + ['leave', 'name', 'name', 'field'], + ['leave', 'field', 1, None], + ['leave', 'selection_set', 'selection_set', + 'operation_definition'], + ['leave', 'operation_definition', 4, None], + ['leave', 'document', None, None]] + + +def describe_visit_in_parallel(): + + def allows_skipping_a_sub_tree(): + # Note: nearly identical to the above test but using ParallelVisitor + ast = parse('{ a, b { x }, c }') + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + if kind == 'field' and node.name.value == 'b': + return SKIP + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + + visit(ast, ParallelVisitor([TestVisitor()])) + assert visited == [ + ['enter', 'document', None], + ['enter', 'operation_definition', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'c'], + ['leave', 'name', 'c'], + ['leave', 'field', None], + ['leave', 'selection_set', None], + ['leave', 'operation_definition', None], + ['leave', 'document', None]] + + def allows_skipping_different_sub_trees(): + ast = parse('{ a { x }, b { y} }') + visited = [] + + class TestVisitor(Visitor): + + def __init__(self, name): + self.name = name + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + name = self.name + visited.append([f'no-{name}', 'enter', kind, value]) + if kind == 'field' and node.name.value == name: + return SKIP + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + name = self.name + visited.append([f'no-{name}', 'leave', kind, value]) + + visit(ast, ParallelVisitor([TestVisitor('a'), TestVisitor('b')])) + assert visited == [ + ['no-a', 'enter', 'document', None], + ['no-b', 'enter', 'document', None], + ['no-a', 'enter', 'operation_definition', None], + ['no-b', 'enter', 'operation_definition', None], + ['no-a', 'enter', 'selection_set', None], + ['no-b', 'enter', 'selection_set', None], + ['no-a', 
'enter', 'field', None], + ['no-b', 'enter', 'field', None], + ['no-b', 'enter', 'name', 'a'], + ['no-b', 'leave', 'name', 'a'], + ['no-b', 'enter', 'selection_set', None], + ['no-b', 'enter', 'field', None], + ['no-b', 'enter', 'name', 'x'], + ['no-b', 'leave', 'name', 'x'], + ['no-b', 'leave', 'field', None], + ['no-b', 'leave', 'selection_set', None], + ['no-b', 'leave', 'field', None], + ['no-a', 'enter', 'field', None], + ['no-b', 'enter', 'field', None], + ['no-a', 'enter', 'name', 'b'], + ['no-a', 'leave', 'name', 'b'], + ['no-a', 'enter', 'selection_set', None], + ['no-a', 'enter', 'field', None], + ['no-a', 'enter', 'name', 'y'], + ['no-a', 'leave', 'name', 'y'], + ['no-a', 'leave', 'field', None], + ['no-a', 'leave', 'selection_set', None], + ['no-a', 'leave', 'field', None], + ['no-a', 'leave', 'selection_set', None], + ['no-b', 'leave', 'selection_set', None], + ['no-a', 'leave', 'operation_definition', None], + ['no-b', 'leave', 'operation_definition', None], + ['no-a', 'leave', 'document', None], + ['no-b', 'leave', 'document', None]] + + def allows_early_exit_while_visiting(): + # Note: nearly identical to the above test but using ParallelVisitor. + ast = parse('{ a, b { x }, c }') + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + if kind == 'name' and node.value == 'x': + return BREAK + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + + visit(ast, ParallelVisitor([TestVisitor()])) + assert visited == [ + ['enter', 'document', None], + ['enter', 'operation_definition', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'b'], + ['leave', 'name', 'b'], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'x']] + + def allows_early_exit_from_different_points(): + ast = parse('{ a { y }, b { x } }') + visited = [] + + class TestVisitor(Visitor): + + def __init__(self, name): + self.name = name + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + name = self.name + visited.append([f'break-{name}', 'enter', kind, value]) + if kind == 'name' and node.value == name: + return BREAK + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + name = self.name + visited.append([f'break-{name}', 'leave', kind, value]) + + visit(ast, ParallelVisitor([TestVisitor('a'), TestVisitor('b')])) + assert visited == [ + ['break-a', 'enter', 'document', None], + ['break-b', 'enter', 'document', None], + ['break-a', 'enter', 'operation_definition', None], + ['break-b', 'enter', 'operation_definition', None], + ['break-a', 'enter', 'selection_set', None], + ['break-b', 'enter', 'selection_set', None], + ['break-a', 'enter', 'field', None], + ['break-b', 'enter', 'field', None], + ['break-a', 'enter', 'name', 'a'], + ['break-b', 'enter', 'name', 'a'], + ['break-b', 'leave', 'name', 'a'], + ['break-b', 'enter', 'selection_set', None], + ['break-b', 'enter', 'field', None], + ['break-b', 'enter', 'name', 'y'], + ['break-b', 
'leave', 'name', 'y'], + ['break-b', 'leave', 'field', None], + ['break-b', 'leave', 'selection_set', None], + ['break-b', 'leave', 'field', None], + ['break-b', 'enter', 'field', None], + ['break-b', 'enter', 'name', 'b']] + + def allows_early_exit_while_leaving(): + # Note: nearly identical to the above test but using ParallelVisitor. + ast = parse('{ a, b { x }, c }') + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + if kind == 'name' and node.value == 'x': + return BREAK + + visit(ast, ParallelVisitor([TestVisitor()])) + assert visited == [ + ['enter', 'document', None], + ['enter', 'operation_definition', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'b'], + ['leave', 'name', 'b'], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'x'], + ['leave', 'name', 'x']] + + def allows_early_exit_from_leaving_different_points(): + ast = parse('{ a { y }, b { x } }') + visited = [] + + class TestVisitor(Visitor): + + def __init__(self, name): + self.name = name + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + name = self.name + visited.append([f'break-{name}', 'enter', kind, value]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + name = self.name + visited.append([f'break-{name}', 'leave', kind, value]) + if kind == 'field' and node.name.value == name: + return BREAK + + visit(ast, ParallelVisitor([TestVisitor('a'), TestVisitor('b')])) + assert visited == [ + ['break-a', 'enter', 'document', None], + ['break-b', 'enter', 'document', None], + ['break-a', 'enter', 'operation_definition', None], + ['break-b', 'enter', 'operation_definition', None], + ['break-a', 'enter', 'selection_set', None], + ['break-b', 'enter', 'selection_set', None], + ['break-a', 'enter', 'field', None], + ['break-b', 'enter', 'field', None], + ['break-a', 'enter', 'name', 'a'], + ['break-b', 'enter', 'name', 'a'], + ['break-a', 'leave', 'name', 'a'], + ['break-b', 'leave', 'name', 'a'], + ['break-a', 'enter', 'selection_set', None], + ['break-b', 'enter', 'selection_set', None], + ['break-a', 'enter', 'field', None], + ['break-b', 'enter', 'field', None], + ['break-a', 'enter', 'name', 'y'], + ['break-b', 'enter', 'name', 'y'], + ['break-a', 'leave', 'name', 'y'], + ['break-b', 'leave', 'name', 'y'], + ['break-a', 'leave', 'field', None], + ['break-b', 'leave', 'field', None], + ['break-a', 'leave', 'selection_set', None], + ['break-b', 'leave', 'selection_set', None], + ['break-a', 'leave', 'field', None], + ['break-b', 'leave', 'field', None], + ['break-b', 'enter', 'field', None], + ['break-b', 'enter', 'name', 'b'], + ['break-b', 'leave', 'name', 'b'], + ['break-b', 'enter', 'selection_set', None], + ['break-b', 'enter', 'field', None], + ['break-b', 'enter', 'name', 'x'], + ['break-b', 'leave', 'name', 'x'], + ['break-b', 'leave', 'field', None], + ['break-b', 'leave', 
'selection_set', None], + ['break-b', 'leave', 'field', None]] + + def allows_for_editing_on_enter(): + ast = parse('{ a, b, c { a, b, c } }', no_location=True) + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor1(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + if node.kind == 'field' and node.name.value == 'b': + return REMOVE + + # noinspection PyMethodMayBeStatic + class TestVisitor2(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args, is_edited=True) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + + edited_ast = visit( + ast, ParallelVisitor([TestVisitor1(), TestVisitor2()])) + assert ast == parse('{ a, b, c { a, b, c } }', no_location=True) + assert edited_ast == parse('{ a, c { a, c } }', no_location=True) + assert visited == [ + ['enter', 'document', None], + ['enter', 'operation_definition', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'c'], + ['leave', 'name', 'c'], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'c'], + ['leave', 'name', 'c'], + ['leave', 'field', None], + ['leave', 'selection_set', None], + ['leave', 'field', None], + ['leave', 'selection_set', None], + ['leave', 'operation_definition', None], + ['leave', 'document', None]] + + def allows_for_editing_on_leave(): + ast = parse('{ a, b, c { a, b, c } }', no_location=True) + visited = [] + + # noinspection PyMethodMayBeStatic + class TestVisitor1(Visitor): + + def leave(self, *args): + check_visitor_fn_args(ast, *args, is_edited=True) + node = args[0] + if node.kind == 'field' and node.name.value == 'b': + return REMOVE + + # noinspection PyMethodMayBeStatic + class TestVisitor2(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['enter', kind, value]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args, is_edited=True) + node = args[0] + kind, value = node.kind, getattr(node, 'value', None) + visited.append(['leave', kind, value]) + + edited_ast = visit( + ast, ParallelVisitor([TestVisitor1(), TestVisitor2()])) + assert ast == parse('{ a, b, c { a, b, c } }', no_location=True) + assert edited_ast == parse('{ a, c { a, c } }', no_location=True) + assert visited == [ + ['enter', 'document', None], + ['enter', 'operation_definition', None], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'b'], + ['leave', 'name', 'b'], + ['enter', 'field', None], + ['enter', 'name', 'c'], + ['leave', 'name', 'c'], + ['enter', 'selection_set', None], + ['enter', 'field', None], + ['enter', 'name', 'a'], + ['leave', 'name', 'a'], + ['leave', 'field', None], + ['enter', 'field', None], + ['enter', 'name', 'b'], + ['leave', 'name', 'b'], + ['enter', 'field', None], + ['enter', 'name', 'c'], + ['leave', 'name', 'c'], + ['leave', 'field', None], + ['leave', 
'selection_set', None], + ['leave', 'field', None], + ['leave', 'selection_set', None], + ['leave', 'operation_definition', None], + ['leave', 'document', None]] + + +def describe_visit_with_type_info(): + + def maintains_type_info_during_visit(): + visited = [] + + ast = parse( + '{ human(id: 4) { name, pets { ... { name } }, unknown } }') + + type_info = TypeInfo(test_schema) + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args) + parent_type = type_info.get_parent_type() + type_ = type_info.get_type() + input_type = type_info.get_input_type() + node = args[0] + visited.append([ + 'enter', node.kind, + node.value if node.kind == 'name' else None, + str(parent_type) if parent_type else None, + str(type_) if type_ else None, + str(input_type) if input_type else None]) + + def leave(self, *args): + check_visitor_fn_args(ast, *args) + parent_type = type_info.get_parent_type() + type_ = type_info.get_type() + input_type = type_info.get_input_type() + node = args[0] + visited.append([ + 'leave', node.kind, + node.value if node.kind == 'name' else None, + str(parent_type) if parent_type else None, + str(type_) if type_ else None, + str(input_type) if input_type else None]) + + visit(ast, TypeInfoVisitor(type_info, TestVisitor())) + + assert visited == [ + ['enter', 'document', None, None, None, None], + ['enter', 'operation_definition', None, None, 'QueryRoot', None], + ['enter', 'selection_set', None, 'QueryRoot', 'QueryRoot', None], + ['enter', 'field', None, 'QueryRoot', 'Human', None], + ['enter', 'name', 'human', 'QueryRoot', 'Human', None], + ['leave', 'name', 'human', 'QueryRoot', 'Human', None], + ['enter', 'argument', None, 'QueryRoot', 'Human', 'ID'], + ['enter', 'name', 'id', 'QueryRoot', 'Human', 'ID'], + ['leave', 'name', 'id', 'QueryRoot', 'Human', 'ID'], + ['enter', 'int_value', None, 'QueryRoot', 'Human', 'ID'], + ['leave', 'int_value', None, 'QueryRoot', 'Human', 'ID'], + ['leave', 'argument', None, 'QueryRoot', 'Human', 'ID'], + ['enter', 'selection_set', None, 'Human', 'Human', None], + ['enter', 'field', None, 'Human', 'String', None], + ['enter', 'name', 'name', 'Human', 'String', None], + ['leave', 'name', 'name', 'Human', 'String', None], + ['leave', 'field', None, 'Human', 'String', None], + ['enter', 'field', None, 'Human', '[Pet]', None], + ['enter', 'name', 'pets', 'Human', '[Pet]', None], + ['leave', 'name', 'pets', 'Human', '[Pet]', None], + ['enter', 'selection_set', None, 'Pet', '[Pet]', None], + ['enter', 'inline_fragment', None, 'Pet', 'Pet', None], + ['enter', 'selection_set', None, 'Pet', 'Pet', None], + ['enter', 'field', None, 'Pet', 'String', None], + ['enter', 'name', 'name', 'Pet', 'String', None], + ['leave', 'name', 'name', 'Pet', 'String', None], + ['leave', 'field', None, 'Pet', 'String', None], + ['leave', 'selection_set', None, 'Pet', 'Pet', None], + ['leave', 'inline_fragment', None, 'Pet', 'Pet', None], + ['leave', 'selection_set', None, 'Pet', '[Pet]', None], + ['leave', 'field', None, 'Human', '[Pet]', None], + ['enter', 'field', None, 'Human', None, None], + ['enter', 'name', 'unknown', 'Human', None, None], + ['leave', 'name', 'unknown', 'Human', None, None], + ['leave', 'field', None, 'Human', None, None], + ['leave', 'selection_set', None, 'Human', 'Human', None], + ['leave', 'field', None, 'QueryRoot', 'Human', None], + ['leave', 'selection_set', None, 'QueryRoot', 'QueryRoot', None], + ['leave', 'operation_definition', None, None, 'QueryRoot', None], + ['leave', 
'document', None, None, None, None], + ] + + def maintains_type_info_during_edit(): + visited = [] + type_info = TypeInfo(test_schema) + + ast = parse('{ human(id: 4) { name, pets }, alien }') + + # noinspection PyMethodMayBeStatic + class TestVisitor(Visitor): + + def enter(self, *args): + check_visitor_fn_args(ast, *args, is_edited=True) + parent_type = type_info.get_parent_type() + type_ = type_info.get_type() + input_type = type_info.get_input_type() + node = args[0] + visited.append([ + 'enter', node.kind, + node.value if node.kind == 'name' else None, + str(parent_type) if parent_type else None, + str(type_) if type_ else None, + str(input_type) if input_type else None]) + + # Make a query valid by adding missing selection sets. + if (node.kind == 'field' and not node.selection_set and + is_composite_type(get_named_type(type_))): + return FieldNode( + alias=node.alias, + name=node.name, + arguments=node.arguments, + directives=node.directives, + selection_set=SelectionSetNode(selections=[ + FieldNode(name=NameNode(value='__typename'))])) + + def leave(self, *args): + check_visitor_fn_args(ast, *args, is_edited=True) + parent_type = type_info.get_parent_type() + type_ = type_info.get_type() + input_type = type_info.get_input_type() + node = args[0] + visited.append([ + 'leave', node.kind, + node.value if node.kind == 'name' else None, + str(parent_type) if parent_type else None, + str(type_) if type_ else None, + str(input_type) if input_type else None]) + + edited_ast = visit(ast, TypeInfoVisitor(type_info, TestVisitor())) + + assert ast == parse('{ human(id: 4) { name, pets }, alien }') + + assert print_ast(edited_ast) == print_ast(parse( + '{ human(id: 4) { name, pets { __typename } },' + ' alien { __typename } }')) + + assert visited == [ + ['enter', 'document', None, None, None, None], + ['enter', 'operation_definition', None, None, 'QueryRoot', None], + ['enter', 'selection_set', None, 'QueryRoot', 'QueryRoot', None], + ['enter', 'field', None, 'QueryRoot', 'Human', None], + ['enter', 'name', 'human', 'QueryRoot', 'Human', None], + ['leave', 'name', 'human', 'QueryRoot', 'Human', None], + ['enter', 'argument', None, 'QueryRoot', 'Human', 'ID'], + ['enter', 'name', 'id', 'QueryRoot', 'Human', 'ID'], + ['leave', 'name', 'id', 'QueryRoot', 'Human', 'ID'], + ['enter', 'int_value', None, 'QueryRoot', 'Human', 'ID'], + ['leave', 'int_value', None, 'QueryRoot', 'Human', 'ID'], + ['leave', 'argument', None, 'QueryRoot', 'Human', 'ID'], + ['enter', 'selection_set', None, 'Human', 'Human', None], + ['enter', 'field', None, 'Human', 'String', None], + ['enter', 'name', 'name', 'Human', 'String', None], + ['leave', 'name', 'name', 'Human', 'String', None], + ['leave', 'field', None, 'Human', 'String', None], + ['enter', 'field', None, 'Human', '[Pet]', None], + ['enter', 'name', 'pets', 'Human', '[Pet]', None], + ['leave', 'name', 'pets', 'Human', '[Pet]', None], + ['enter', 'selection_set', None, 'Pet', '[Pet]', None], + ['enter', 'field', None, 'Pet', 'String!', None], + ['enter', 'name', '__typename', 'Pet', 'String!', None], + ['leave', 'name', '__typename', 'Pet', 'String!', None], + ['leave', 'field', None, 'Pet', 'String!', None], + ['leave', 'selection_set', None, 'Pet', '[Pet]', None], + ['leave', 'field', None, 'Human', '[Pet]', None], + ['leave', 'selection_set', None, 'Human', 'Human', None], + ['leave', 'field', None, 'QueryRoot', 'Human', None], + ['enter', 'field', None, 'QueryRoot', 'Alien', None], + ['enter', 'name', 'alien', 'QueryRoot', 'Alien', None], + ['leave', 'name', 
'alien', 'QueryRoot', 'Alien', None], + ['enter', 'selection_set', None, 'Alien', 'Alien', None], + ['enter', 'field', None, 'Alien', 'String!', None], + ['enter', 'name', '__typename', 'Alien', 'String!', None], + ['leave', 'name', '__typename', 'Alien', 'String!', None], + ['leave', 'field', None, 'Alien', 'String!', None], + ['leave', 'selection_set', None, 'Alien', 'Alien', None], + ['leave', 'field', None, 'QueryRoot', 'Alien', None], + ['leave', 'selection_set', None, 'QueryRoot', 'QueryRoot', None], + ['leave', 'operation_definition', None, None, 'QueryRoot', None], + ['leave', 'document', None, None, None, None], + ] diff --git a/tests/pyutils/__init__.py b/tests/pyutils/__init__.py new file mode 100644 index 00000000..c1675f50 --- /dev/null +++ b/tests/pyutils/__init__.py @@ -0,0 +1 @@ +"""Tests for graphql.pyutils""" diff --git a/tests/pyutils/test_cached_property.py b/tests/pyutils/test_cached_property.py new file mode 100644 index 00000000..7b20ccd1 --- /dev/null +++ b/tests/pyutils/test_cached_property.py @@ -0,0 +1,31 @@ +from graphql.pyutils import cached_property + + +def describe_cached_property(): + + def works_like_a_normal_property(): + + class TestClass: + + @cached_property + def value(self): + return 42 + + assert TestClass().value == 42 + + def caches_the_value(): + + class TestClass: + evaluations = 0 + + @cached_property + def value(self): + self.__class__.evaluations += 1 + return 42 + + obj = TestClass() + assert TestClass.evaluations == 0 + assert obj.value == 42 + assert TestClass.evaluations == 1 + assert obj.value == 42 + assert TestClass.evaluations == 1 diff --git a/tests/pyutils/test_contain_subset.py b/tests/pyutils/test_contain_subset.py new file mode 100644 index 00000000..2db8be8c --- /dev/null +++ b/tests/pyutils/test_contain_subset.py @@ -0,0 +1,140 @@ +from datetime import date + +from pytest import fixture + +from graphql.pyutils import contain_subset + + +def describe_plain_object(): + + tested_object = {'a': 'b', 'c': 'd'} + + def should_pass_for_smaller_object(): + assert contain_subset(tested_object, {'a': 'b'}) + + def should_pass_for_same_object(): + assert contain_subset(tested_object, {'a': 'b', 'c': 'd'}) + + def should_pass_for_similar_but_not_the_same_object(): + assert not contain_subset(tested_object, {'a': 'notB', 'c': 'd'}) + + +def describe_complex_object(): + + tested_object = { + 'a': 'b', 'c': 'd', 'e': {'foo': 'bar', 'baz': {'qux': 'quux'}}} + + def should_pass_for_smaller_object_1(): + assert contain_subset(tested_object, {'a': 'b', 'e': {'foo': 'bar'}}) + + def should_pass_for_smaller_object_2(): + assert contain_subset( + tested_object, {'e': {'foo': 'bar', 'baz': {'qux': 'quux'}}}) + + def should_pass_for_same_object(): + assert contain_subset(tested_object, { + 'a': 'b', 'c': 'd', 'e': {'foo': 'bar', 'baz': {'qux': 'quux'}}}) + + def should_pass_for_similar_but_not_the_same_object(): + assert not contain_subset(tested_object, { + 'e': {'foo': 'bar', 'baz': {'qux': 'notAQuux'}}}) + + def should_fail_if_comparing_when_comparing_objects_to_dates(): + assert not contain_subset(tested_object, {'e': date.today()}) + + +def describe_circular_objects(): + + @fixture + def test_object(): + obj = {} + obj['arr'] = [obj, obj] + obj['arr'].append(obj['arr']) + obj['obj'] = obj + return obj + + # noinspection PyShadowingNames + def should_contain_subdocument(test_object): + assert contain_subset(test_object, { + 'arr': [ + {'arr': []}, + {'arr': []}, + [ + {'arr': []}, + {'arr': []} + ] + ]}) + + # noinspection PyShadowingNames + def 
should_not_contain_similar_object(test_object): + assert not contain_subset(test_object, { + 'arr': [ + {'arr': ['just random field']}, + {'arr': []}, + [ + {'arr': []}, + {'arr': []} + ] + ]}) + + +def describe_object_with_compare_function(): + + def should_pass_when_function_returns_true(): + assert contain_subset({'a': 5}, {'a': lambda a: a}) + + def should_fail_when_function_returns_false(): + assert not contain_subset({'a': 5}, {'a': lambda a: not a}) + + def should_pass_for_function_with_no_arguments(): + assert contain_subset({'a': 5}, {'a': lambda: True}) + + +def describe_comparison_of_non_objects(): + + def should_fail_if_actual_subset_is_null(): + assert not contain_subset(None, {'a': 1}) + + def should_fail_if_expected_subset_is_not_an_object(): + assert not contain_subset({'a': 1}, None) + + def should_not_fail_for_same_non_object_string_variables(): + assert contain_subset('string', 'string') + + +def describe_comparison_of_dates(): + + def should_pass_for_the_same_date(): + assert contain_subset(date(2015, 11, 30), date(2015, 11, 30)) + + def should_pass_for_the_same_date_if_nested(): + assert contain_subset( + {'a': date(2015, 11, 30)}, {'a': date(2015, 11, 30)}) + + def should_fail_for_a_different_date(): + assert not contain_subset(date(2015, 11, 30), date(2012, 2, 22)) + + def should_fail_for_a_different_date_if_nested(): + assert not contain_subset( + {'a': date(2015, 11, 30)}, {'a': date(2015, 2, 22)}) + + +def describe_cyclic_objects(): + + def should_pass(): + child = {} + parent = {'children': [child]} + child['parent'] = parent + + my_object = {'a': 1, 'b': 'two', 'c': parent} + assert contain_subset(my_object, {'a': 1, 'c': parent}) + + +def describe_list_objects(): + + test_list = [{'a': 'a', 'b': 'b'}, {'v': 'f', 'd': {'z': 'g'}}] + + def works_well_with_lists(): + assert contain_subset(test_list, [{'a': 'a'}]) + assert contain_subset(test_list, [{'a': 'a', 'b': 'b'}]) + assert not contain_subset(test_list, [{'a': 'a', 'b': 'bd'}]) diff --git a/tests/pyutils/test_convert_case.py b/tests/pyutils/test_convert_case.py new file mode 100644 index 00000000..adfc2d6f --- /dev/null +++ b/tests/pyutils/test_convert_case.py @@ -0,0 +1,53 @@ +from graphql.pyutils import camel_to_snake, snake_to_camel + + +def describe_camel_to_snake(): + + def converts_typical_names(): + result = camel_to_snake('CamelCase') + assert result == 'camel_case' + result = camel_to_snake('InputObjectTypeExtensionNode') + assert result == 'input_object_type_extension_node' + + def may_start_with_lowercase(): + result = camel_to_snake('CamelCase') + assert result == 'camel_case' + + def works_with_acronyms(): + result = camel_to_snake('SlowXMLParser') + assert result == 'slow_xml_parser' + result = camel_to_snake('FastGraphQLParser') + assert result == 'fast_graph_ql_parser' + + def keeps_already_snake(): + result = camel_to_snake('snake_case') + assert result == 'snake_case' + + +def describe_snake_to_camel(): + + def converts_typical_names(): + result = snake_to_camel('snake_case') + assert result == 'SnakeCase' + result = snake_to_camel('input_object_type_extension_node') + assert result == 'InputObjectTypeExtensionNode' + + def may_start_with_uppercase(): + result = snake_to_camel('Snake_case') + assert result == 'SnakeCase' + + def works_with_acronyms(): + result = snake_to_camel('slow_xml_parser') + assert result == 'SlowXmlParser' + result = snake_to_camel('fast_graph_ql_parser') + assert result == 'FastGraphQlParser' + + def keeps_already_camel(): + result = snake_to_camel('CamelCase') + 
assert result == 'CamelCase' + + def can_produce_lower_camel_case(): + result = snake_to_camel('snake_case', upper=False) + assert result == 'snakeCase' + result = snake_to_camel('input_object_type_extension_node', False) + assert result == 'inputObjectTypeExtensionNode' diff --git a/tests/pyutils/test_dedent.py b/tests/pyutils/test_dedent.py new file mode 100644 index 00000000..66981fff --- /dev/null +++ b/tests/pyutils/test_dedent.py @@ -0,0 +1,70 @@ +from graphql.pyutils import dedent + + +def describe_dedent(): + + def removes_indentation_in_typical_usage(): + assert dedent(""" + type Query { + me: User + } + + type User { + id: ID + name: String + } + """) == ( + 'type Query {\n me: User\n}\n\n' + + 'type User {\n id: ID\n name: String\n}\n') + + def removes_only_the_first_level_of_indentation(): + assert dedent(""" + qux + quux + quuux + quuuux + """) == 'qux\n quux\n quuux\n quuuux\n' + + def does_not_escape_special_characters(): + assert dedent(""" + type Root { + field(arg: String = "wi\th de\fault"): String + } + """) == ( + 'type Root {\n' + ' field(arg: String = "wi\th de\fault"): String\n}\n') + + def also_removes_indentation_using_tabs(): + assert dedent(""" + \t\t type Query { + \t\t me: User + \t\t } + """) == 'type Query {\n me: User\n}\n' + + def removes_leading_newlines(): + assert dedent(""" + + + type Query { + me: User + }""") == 'type Query {\n me: User\n}' + + def does_not_remove_trailing_newlines(): + assert dedent(""" + type Query { + me: User + } + + """) == 'type Query {\n me: User\n}\n\n' + + def removes_all_trailing_spaces_and_tabs(): + assert dedent(""" + type Query { + me: User + } + \t\t \t """) == 'type Query {\n me: User\n}\n' + + def works_on_text_without_leading_newline(): + assert dedent(""" type Query { + me: User + }""") == 'type Query {\n me: User\n}' diff --git a/tests/pyutils/test_event_emitter.py b/tests/pyutils/test_event_emitter.py new file mode 100644 index 00000000..e87915f2 --- /dev/null +++ b/tests/pyutils/test_event_emitter.py @@ -0,0 +1,103 @@ +from asyncio import sleep +from pytest import mark, raises + +from graphql.pyutils import EventEmitter, EventEmitterAsyncIterator + + +def describe_event_emitter(): + + def add_and_remove_listeners(): + emitter = EventEmitter() + + def listener1(value): + pass + + def listener2(value): + pass + + emitter.add_listener('foo', listener1) + emitter.add_listener('foo', listener2) + emitter.add_listener('bar', listener1) + assert emitter.listeners['foo'] == [listener1, listener2] + assert emitter.listeners['bar'] == [listener1] + emitter.remove_listener('foo', listener1) + assert emitter.listeners['foo'] == [listener2] + assert emitter.listeners['bar'] == [listener1] + emitter.remove_listener('foo', listener2) + assert emitter.listeners['foo'] == [] + assert emitter.listeners['bar'] == [listener1] + emitter.remove_listener('bar', listener1) + assert emitter.listeners['bar'] == [] + + def emit_sync(): + emitter = EventEmitter() + emitted = [] + + def listener(value): + emitted.append(value) + + emitter.add_listener('foo', listener) + assert emitter.emit('foo', 'bar') is True + assert emitted == ['bar'] + assert emitter.emit('bar', 'baz') is False + assert emitted == ['bar'] + + @mark.asyncio + async def emit_async(): + emitter = EventEmitter() + emitted = [] + + async def listener(value): + emitted.append(value) + + emitter.add_listener('foo', listener) + emitter.emit('foo', 'bar') + emitter.emit('bar', 'baz') + await sleep(0) + assert emitted == ['bar'] + + +def describe_event_emitter_async_iterator(): + 
+ @mark.asyncio + async def subscribe_async_iterator_mock(): + # Create an AsyncIterator from an EventEmitter + emitter = EventEmitter() + iterator = EventEmitterAsyncIterator(emitter, 'publish') + + # Queue up publishes + assert emitter.emit('publish', 'Apple') is True + assert emitter.emit('publish', 'Banana') is True + + # Read payloads + assert await iterator.__anext__() == 'Apple' + assert await iterator.__anext__() == 'Banana' + + # Read ahead + i3 = iterator.__anext__() + i4 = iterator.__anext__() + + # Publish + assert emitter.emit('publish', 'Coconut') is True + assert emitter.emit('publish', 'Durian') is True + + # Await results + assert await i3 == 'Coconut' + assert await i4 == 'Durian' + + # Read ahead + i5 = iterator.__anext__() + + # Terminate emitter + await iterator.aclose() + + # Publish is not caught after terminate + assert emitter.emit('publish', 'Fig') is False + + # Find that cancelled read-ahead got a "done" result + with raises(StopAsyncIteration): + await i5 + + # And next returns empty completion value + with raises(StopAsyncIteration): + await iterator.__anext__() diff --git a/tests/pyutils/test_is_finite.py b/tests/pyutils/test_is_finite.py new file mode 100644 index 00000000..289f0ea4 --- /dev/null +++ b/tests/pyutils/test_is_finite.py @@ -0,0 +1,42 @@ +from math import inf, nan + +from graphql.error import INVALID +from graphql.pyutils import is_finite + + +def describe_is_finite(): + + def null_is_not_finite(): + assert is_finite(None) is False + + def booleans_are_finite(): + # since they are considered as integers 0 and 1 + assert is_finite(False) is True + assert is_finite(True) is True + + def strings_are_not_finite(): + assert is_finite('string') is False + + def ints_are_finite(): + assert is_finite(0) is True + assert is_finite(1) is True + assert is_finite(-1) is True + assert is_finite(1 >> 100) is True + + def floats_are_finite(): + assert is_finite(0.0) is True + assert is_finite(1.5) is True + assert is_finite(-1.5) is True + assert is_finite(1e100) is True + assert is_finite(-1e100) is True + assert is_finite(1e-100) is True + + def nan_is_not_finite(): + assert is_finite(nan) is False + + def inf_is_not_finite(): + assert is_finite(inf) is False + assert is_finite(-inf) is False + + def undefined_is_not_finite(): + assert is_finite(INVALID) is False diff --git a/tests/pyutils/test_is_integer.py b/tests/pyutils/test_is_integer.py new file mode 100644 index 00000000..a251cfb1 --- /dev/null +++ b/tests/pyutils/test_is_integer.py @@ -0,0 +1,70 @@ +from math import inf, nan + +from graphql.error import INVALID +from graphql.pyutils import is_integer + + +def describe_is_integer(): + + def null_is_not_integer(): + assert is_integer(None) is False + + def object_is_not_integer(): + assert is_integer(object()) is False + + def booleans_are_not_integer(): + assert is_integer(False) is False + assert is_integer(True) is False + + def strings_are_not_integer(): + assert is_integer('string') is False + + def ints_are_integer(): + assert is_integer(0) is True + assert is_integer(1) is True + assert is_integer(-1) is True + assert is_integer(42) is True + assert is_integer(1234567890) is True + assert is_integer(-1234567890) is True + assert is_integer(1 >> 100) is True + + def floats_with_fractional_part_are_not_integer(): + assert is_integer(0.5) is False + assert is_integer(1.5) is False + assert is_integer(-1.5) is False + assert is_integer(0.00001) is False + assert is_integer(-0.00001) is False + assert is_integer(1.00001) is False + assert 
is_integer(-1.00001) is False + assert is_integer(42.5) is False + assert is_integer(10000.1) is False + assert is_integer(-10000.1) is False + assert is_integer(1234567890.5) is False + assert is_integer(-1234567890.5) is False + + def floats_without_fractional_part_are_integer(): + assert is_integer(0.0) is True + assert is_integer(1.0) is True + assert is_integer(-1.0) is True + assert is_integer(10.0) is True + assert is_integer(-10.0) is True + assert is_integer(42.0) is True + assert is_integer(1234567890.0) is True + assert is_integer(-1234567890.0) is True + assert is_integer(1e100) is True + assert is_integer(-1e100) is True + + def complex_is_not_integer(): + assert is_integer(1j) is False + assert is_integer(-1j) is False + assert is_integer(42 + 1j) is False + + def nan_is_not_integer(): + assert is_integer(nan) is False + + def inf_is_not_integer(): + assert is_integer(inf) is False + assert is_integer(-inf) is False + + def undefined_is_not_integer(): + assert is_integer(INVALID) is False diff --git a/tests/pyutils/test_is_invalid.py b/tests/pyutils/test_is_invalid.py new file mode 100644 index 00000000..d39c12e2 --- /dev/null +++ b/tests/pyutils/test_is_invalid.py @@ -0,0 +1,32 @@ +from math import inf, nan + +from graphql.error import INVALID +from graphql.pyutils import is_invalid + + +def describe_is_invalid(): + + def null_is_not_invalid(): + assert is_invalid(None) is False + + def falsy_objects_are_not_invalid(): + assert is_invalid('') is False + assert is_invalid(0) is False + assert is_invalid([]) is False + assert is_invalid({}) is False + + def truthy_objects_are_not_invalid(): + assert is_invalid('str') is False + assert is_invalid(1) is False + assert is_invalid([0]) is False + assert is_invalid({None: None}) is False + + def inf_is_not_invalid(): + assert is_invalid(inf) is False + assert is_invalid(-inf) is False + + def undefined_is_invalid(): + assert is_invalid(INVALID) is True + + def nan_is_invalid(): + assert is_invalid(nan) is True diff --git a/tests/pyutils/test_is_nullish.py b/tests/pyutils/test_is_nullish.py new file mode 100644 index 00000000..0a2b8274 --- /dev/null +++ b/tests/pyutils/test_is_nullish.py @@ -0,0 +1,32 @@ +from math import inf, nan + +from graphql.error import INVALID +from graphql.pyutils import is_nullish + + +def describe_is_nullish(): + + def null_is_nullish(): + assert is_nullish(None) is True + + def falsy_objects_are_not_nullish(): + assert is_nullish('') is False + assert is_nullish(0) is False + assert is_nullish([]) is False + assert is_nullish({}) is False + + def truthy_objects_are_not_nullish(): + assert is_nullish('str') is False + assert is_nullish(1) is False + assert is_nullish([0]) is False + assert is_nullish({None: None}) is False + + def inf_is_not_nullish(): + assert is_nullish(inf) is False + assert is_nullish(-inf) is False + + def undefined_is_nullish(): + assert is_nullish(INVALID) is True + + def nan_is_nullish(): + assert is_nullish(nan) diff --git a/tests/pyutils/test_or_list.py b/tests/pyutils/test_or_list.py new file mode 100644 index 00000000..840d67dc --- /dev/null +++ b/tests/pyutils/test_or_list.py @@ -0,0 +1,29 @@ +from pytest import raises + +from graphql.pyutils import or_list + + +def describe_or_list(): + + def returns_none_for_empty_list(): + with raises(TypeError): + or_list([]) + + def prints_list_with_one_item(): + assert or_list(['one']) == 'one' + + def prints_list_with_two_items(): + assert or_list(['one', 'two']) == 'one or two' + + def prints_list_with_three_items(): + assert 
or_list(['A', 'B', 'C']) == 'A, B or C' + assert or_list(['one', 'two', 'three']) == 'one, two or three' + + def prints_list_with_five_items(): + assert or_list(['A', 'B', 'C', 'D', 'E']) == 'A, B, C, D or E' + + def prints_shortened_list_with_six_items(): + assert or_list(['A', 'B', 'C', 'D', 'E', 'F']) == 'A, B, C, D or E' + + def prints_tuple_with_three_items(): + assert or_list(('A', 'B', 'C')) == 'A, B or C' diff --git a/tests/pyutils/test_quoted_or_list.py b/tests/pyutils/test_quoted_or_list.py new file mode 100644 index 00000000..842fb28e --- /dev/null +++ b/tests/pyutils/test_quoted_or_list.py @@ -0,0 +1,23 @@ +from pytest import raises + +from graphql.pyutils import quoted_or_list + + +def describe_quoted_or_list(): + + def does_not_accept_an_empty_list(): + with raises(TypeError): + quoted_or_list([]) + + def returns_single_quoted_item(): + assert quoted_or_list(['A']) == "'A'" + + def returns_two_item_list(): + assert quoted_or_list(['A', 'B']) == "'A' or 'B'" + + def returns_comma_separated_many_item_list(): + assert quoted_or_list(['A', 'B', 'C']) == "'A', 'B' or 'C'" + + def limits_to_five_items(): + assert quoted_or_list( + ['A', 'B', 'C', 'D', 'E', 'F']) == "'A', 'B', 'C', 'D' or 'E'" diff --git a/tests/pyutils/test_suggesion_list.py b/tests/pyutils/test_suggesion_list.py new file mode 100644 index 00000000..19428099 --- /dev/null +++ b/tests/pyutils/test_suggesion_list.py @@ -0,0 +1,22 @@ +from graphql.pyutils import suggestion_list + + +def describe_suggestion_list(): + + def returns_results_when_input_is_empty(): + assert suggestion_list('', ['a']) == ['a'] + + def returns_empty_array_when_there_are_no_options(): + assert suggestion_list('input', []) == [] + + def returns_options_sorted_based_on_similarity(): + assert suggestion_list( + 'abc', ['a', 'ab', 'abc']) == ['abc', 'ab'] + + assert suggestion_list( + 'csutomer', ['store', 'customer', 'stomer', 'some', 'more']) == [ + 'customer', 'stomer', 'store', 'some'] + + assert suggestion_list( + 'GraphQl', ['graphics', 'SQL', 'GraphQL', 'quarks', 'mark']) == [ + 'GraphQL', 'graphics'] diff --git a/tests/star_wars_data.py b/tests/star_wars_data.py new file mode 100644 index 00000000..233fe336 --- /dev/null +++ b/tests/star_wars_data.py @@ -0,0 +1,139 @@ +"""This defines a basic set of data for our Star Wars Schema. + +This data is hard coded for the sake of the demo, but you could imagine +fetching this data from a backend service rather than from hardcoded +JSON objects in a more complex demo. +""" + +from typing import Sequence, Iterator + +__all__ = [ + 'get_droid', 'get_friends', 'get_hero', 'get_human', + 'get_secret_backstory'] + +# These are classes which correspond to the schema. +# They represent the shape of the data visited during field resolution. 
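# Illustrative usage sketch for the helpers defined further down in this
# module: the schema's resolvers either call these helpers directly or fall
# back to plain attribute access on these objects, so they can also be
# exercised on their own. Only names defined in this module are assumed.

from tests.star_wars_data import get_friends, get_hero

r2 = get_hero(None)  # R2-D2 is the hero for every episode except Episode V
assert r2.name == 'R2-D2'
assert [friend.name for friend in get_friends(r2)] == [
    'Luke Skywalker', 'Han Solo', 'Leia Organa']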
+ + +class Character: + id: str + name: str + friends: Sequence[str] + appearsIn: Sequence[str] + + +# noinspection PyPep8Naming +class Human(Character): + type = 'Human' + homePlanet: str + + # noinspection PyShadowingBuiltins + def __init__(self, id, name, friends, appearsIn, homePlanet): + self.id, self.name = id, name + self.friends, self.appearsIn = friends, appearsIn + self.homePlanet = homePlanet + + +# noinspection PyPep8Naming +class Droid(Character): + type = 'Droid' + primaryFunction: str + + # noinspection PyShadowingBuiltins + def __init__(self, id, name, friends, appearsIn, primaryFunction): + self.id, self.name = id, name + self.friends, self.appearsIn = friends, appearsIn + self.primaryFunction = primaryFunction + + +luke = Human( + id='1000', + name='Luke Skywalker', + friends=['1002', '1003', '2000', '2001'], + appearsIn=[4, 5, 6], + homePlanet='Tatooine') + +vader = Human( + id='1001', + name='Darth Vader', + friends=['1004'], + appearsIn=[4, 5, 6], + homePlanet='Tatooine') + +han = Human( + id='1002', + name='Han Solo', + friends=['1000', '1003', '2001'], + appearsIn=[4, 5, 6], + homePlanet=None) + +leia = Human( + id='1003', + name='Leia Organa', + friends=['1000', '1002', '2000', '2001'], + appearsIn=[4, 5, 6], + homePlanet='Alderaan') + +tarkin = Human( + id='1004', + name='Wilhuff Tarkin', + friends=['1001'], + appearsIn=[4], + homePlanet=None) + +human_data = { + '1000': luke, '1001': vader, '1002': han, '1003': leia, '1004': tarkin} + +threepio = Droid( + id='2000', + name='C-3PO', + friends=['1000', '1002', '1003', '2001'], + appearsIn=[4, 5, 6], + primaryFunction='Protocol') + +artoo = Droid( + id='2001', + name='R2-D2', + friends=['1000', '1002', '1003'], + appearsIn=[4, 5, 6], + primaryFunction='Astromech') + +droid_data = { + '2000': threepio, '2001': artoo} + + +# noinspection PyShadowingBuiltins +def get_character(id: str) -> Character: + """Helper function to get a character by ID.""" + return human_data.get(id) or droid_data.get(id) + + +def get_friends(character: Character) -> Iterator[Character]: + """Allows us to query for a character's friends.""" + return map(get_character, character.friends) + + +def get_hero(episode: int) -> Character: + """Allows us to fetch the undisputed hero of the trilogy, R2-D2.""" + if episode == 5: + # Luke is the hero of Episode V. + return luke + # Artoo is the hero otherwise. + return artoo + + +# noinspection PyShadowingBuiltins +def get_human(id: str) -> Human: + """Allows us to query for the human with the given id.""" + return human_data.get(id) + + +# noinspection PyShadowingBuiltins +def get_droid(id: str) -> Droid: + """Allows us to query for the droid with the given id.""" + return droid_data.get(id) + + +def get_secret_backstory(character: Character) -> str: + """Raise an error when attempting to get the secret backstory.""" + raise RuntimeError('secretBackstory is secret.') diff --git a/tests/star_wars_schema.py b/tests/star_wars_schema.py new file mode 100644 index 00000000..ffff3a67 --- /dev/null +++ b/tests/star_wars_schema.py @@ -0,0 +1,206 @@ +"""Star Wars GraphQL schema + +This is designed to be an end-to-end test, demonstrating the full +GraphQL stack. + +We will create a GraphQL schema that describes the major characters +in the original Star Wars trilogy. + +NOTE: This may contain spoilers for the original Star Wars trilogy. + +Using our shorthand to describe type systems, the type system for our +Star Wars example is:: + + enum Episode { NEWHOPE, EMPIRE, JEDI } + + interface Character { + id: String! 
+ name: String + friends: [Character] + appearsIn: [Episode] + } + + type Human implements Character { + id: String! + name: String + friends: [Character] + appearsIn: [Episode] + homePlanet: String + } + + type Droid implements Character { + id: String! + name: String + friends: [Character] + appearsIn: [Episode] + primaryFunction: String + } + + type Query { + hero(episode: Episode): Character + human(id: String!): Human + droid(id: String!): Droid + } +""" + +from graphql.type import ( + GraphQLArgument, GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLInterfaceType, GraphQLList, GraphQLNonNull, GraphQLObjectType, + GraphQLSchema, GraphQLString) +from tests.star_wars_data import ( + get_droid, get_friends, get_hero, get_human, get_secret_backstory) + +__all__ = ['star_wars_schema'] + +# We begin by setting up our schema. + +# The original trilogy consists of three movies. +# +# This implements the following type system shorthand: +# enum Episode { NEWHOPE, EMPIRE, JEDI } + +episode_enum = GraphQLEnumType('Episode', { + 'NEWHOPE': GraphQLEnumValue(4, description='Released in 1977.'), + 'EMPIRE': GraphQLEnumValue(5, description='Released in 1980.'), + 'JEDI': GraphQLEnumValue(6, description='Released in 1983.') + }, description='One of the films in the Star Wars Trilogy') + +# Characters in the Star Wars trilogy are either humans or droids. +# +# This implements the following type system shorthand: +# interface Character { +# id: String! +# name: String +# friends: [Character] +# appearsIn: [Episode] +# secretBackstory: String + +character_interface = GraphQLInterfaceType('Character', lambda: { + 'id': GraphQLField( + GraphQLNonNull(GraphQLString), + description='The id of the character.'), + 'name': GraphQLField( + GraphQLString, + description='The name of the character.'), + 'friends': GraphQLField( + GraphQLList(character_interface), + description='The friends of the character,' + ' or an empty list if they have none.'), + 'appearsIn': GraphQLField( + GraphQLList(episode_enum), + description='Which movies they appear in.'), + 'secretBackstory': GraphQLField( + GraphQLString, + description='All secrets about their past.')}, + resolve_type=lambda character, _info: + {'Human': human_type, 'Droid': droid_type}.get(character.type), + description='A character in the Star Wars Trilogy') + +# We define our human type, which implements the character interface. +# +# This implements the following type system shorthand: +# type Human : Character { +# id: String! 
+# name: String +# friends: [Character] +# appearsIn: [Episode] +# secretBackstory: String +# } + +human_type = GraphQLObjectType('Human', lambda: { + 'id': GraphQLField( + GraphQLNonNull(GraphQLString), + description='The id of the human.'), + 'name': GraphQLField( + GraphQLString, + description='The name of the human.'), + 'friends': GraphQLField( + GraphQLList(character_interface), + description='The friends of the human,' + ' or an empty list if they have none.', + resolve=lambda human, _info: get_friends(human)), + 'appearsIn': GraphQLField( + GraphQLList(episode_enum), + description='Which movies they appear in.'), + 'homePlanet': GraphQLField( + GraphQLString, + description='The home planet of the human, or null if unknown.'), + 'secretBackstory': GraphQLField( + GraphQLString, + resolve=lambda human, _info: get_secret_backstory(human), + description='Where are they from' + ' and how they came to be who they are.')}, + interfaces=[character_interface], + description='A humanoid creature in the Star Wars universe.') + +# The other type of character in Star Wars is a droid. +# +# This implements the following type system shorthand: +# type Droid : Character { +# id: String! +# name: String +# friends: [Character] +# appearsIn: [Episode] +# secretBackstory: String +# primaryFunction: String +# } + +droid_type = GraphQLObjectType('Droid', lambda: { + 'id': GraphQLField( + GraphQLNonNull(GraphQLString), + description='The id of the droid.'), + 'name': GraphQLField( + GraphQLString, + description='The name of the droid.'), + 'friends': GraphQLField( + GraphQLList(character_interface), + description='The friends of the droid,' + ' or an empty list if they have none.', + resolve=lambda droid, _info: get_friends(droid), + ), + 'appearsIn': GraphQLField( + GraphQLList(episode_enum), + description='Which movies they appear in.'), + 'secretBackstory': GraphQLField( + GraphQLString, + resolve=lambda droid, _info: get_secret_backstory(droid), + description='Construction date and the name of the designer.'), + 'primaryFunction': GraphQLField( + GraphQLString, + description='The primary function of the droid.') + }, + interfaces=[character_interface], + description='A mechanical creature in the Star Wars universe.') + +# This is the type that will be the root of our query, and the +# entry point into our schema. It gives us the ability to fetch +# objects by their IDs, as well as to fetch the undisputed hero +# of the Star Wars trilogy, R2-D2, directly. +# +# This implements the following type system shorthand: +# type Query { +# hero(episode: Episode): Character +# human(id: String!): Human +# droid(id: String!): Droid +# } + +# noinspection PyShadowingBuiltins +query_type = GraphQLObjectType('Query', lambda: { + 'hero': GraphQLField(character_interface, args={ + 'episode': GraphQLArgument(episode_enum, description=( + 'If omitted, returns the hero of the whole saga.' + ' If provided, returns the hero of that particular episode.'))}, + resolve=lambda root, _info, episode=None: get_hero(episode)), + 'human': GraphQLField(human_type, args={ + 'id': GraphQLArgument( + GraphQLNonNull(GraphQLString), description='id of the human')}, + resolve=lambda root, _info, id: get_human(id)), + 'droid': GraphQLField(droid_type, args={ + 'id': GraphQLArgument( + GraphQLNonNull(GraphQLString), description='id of the droid')}, + resolve=lambda root, _info, id: get_droid(id))}) + +# Finally, we construct our schema (whose starting query type is the query +# type we defined above) and export it. 
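# Human and Droid are listed explicitly in `types` below so that both
# implementations of the Character interface are present in the schema's
# type map. Usage sketch (assuming the synchronous `graphql_sync` entry
# point shipped with this package):
#
#     from graphql import graphql_sync
#     from tests.star_wars_schema import star_wars_schema
#
#     result = graphql_sync(star_wars_schema, '{ hero { name } }')
#     assert result.data == {'hero': {'name': 'R2-D2'}}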
+ +star_wars_schema = GraphQLSchema(query_type, types=[human_type, droid_type]) diff --git a/tests/subscription/__init__.py b/tests/subscription/__init__.py new file mode 100644 index 00000000..8551fd41 --- /dev/null +++ b/tests/subscription/__init__.py @@ -0,0 +1 @@ +"""Tests for graphql.subscription""" diff --git a/tests/subscription/test_map_async_iterator.py b/tests/subscription/test_map_async_iterator.py new file mode 100644 index 00000000..eef04882 --- /dev/null +++ b/tests/subscription/test_map_async_iterator.py @@ -0,0 +1,173 @@ +from pytest import mark, raises + +from graphql.subscription.map_async_iterator import MapAsyncIterator + +try: + # noinspection PyUnresolvedReferences,PyUnboundLocalVariable + anext +except NameError: # anext does not yet exist in Python 3.6 + async def anext(iterable): + """Return the next item from an async iterator.""" + return await iterable.__anext__() + + +def describe_map_async_iterator(): + + @mark.asyncio + async def maps_over_async_values(): + async def source(): + yield 1 + yield 2 + yield 3 + + doubles = MapAsyncIterator(source(), lambda x: x + x) + + assert [value async for value in doubles] == [2, 4, 6] + + @mark.asyncio + async def maps_over_async_values_with_async_function(): + async def source(): + yield 1 + yield 2 + yield 3 + + async def double(x): + return x + x + + doubles = MapAsyncIterator(source(), double) + + assert [value async for value in doubles] == [2, 4, 6] + + @mark.asyncio + async def allows_returning_early_from_async_values(): + async def source(): + yield 1 + yield 2 + yield 3 + + doubles = MapAsyncIterator(source(), lambda x: x + x) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Early return + await doubles.aclose() + + # Subsequent nexts + with raises(StopAsyncIteration): + await anext(doubles) + with raises(StopAsyncIteration): + await anext(doubles) + + @mark.asyncio + async def passes_through_early_return_from_async_values(): + async def source(): + try: + yield 1 + yield 2 + yield 3 + finally: + yield 'done' + yield 'last' + + doubles = MapAsyncIterator(source(), lambda x: x + x) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Early return + await doubles.aclose() + + # Subsequent nexts may yield from finally block + assert await anext(doubles) == 'lastlast' + with raises(GeneratorExit): + assert await anext(doubles) + + @mark.asyncio + async def allows_throwing_errors_through_async_generators(): + async def source(): + yield 1 + yield 2 + yield 3 + + doubles = MapAsyncIterator(source(), lambda x: x + x) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Throw error + with raises(RuntimeError) as exc_info: + await doubles.athrow(RuntimeError('ouch')) + + assert str(exc_info.value) == 'ouch' + + with raises(StopAsyncIteration): + await anext(doubles) + with raises(StopAsyncIteration): + await anext(doubles) + + @mark.asyncio + async def passes_through_caught_errors_through_async_generators(): + async def source(): + try: + yield 1 + yield 2 + yield 3 + except Exception as e: + yield e + + doubles = MapAsyncIterator(source(), lambda x: x + x) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Throw error + await doubles.athrow(RuntimeError('ouch')) + + with raises(StopAsyncIteration): + await anext(doubles) + with raises(StopAsyncIteration): + await anext(doubles) + + @mark.asyncio + async def does_not_normally_map_over_thrown_errors(): + async def source(): + yield 'Hello' + raise 
RuntimeError('Goodbye') + + doubles = MapAsyncIterator(source(), lambda x: x + x) + + assert await anext(doubles) == 'HelloHello' + + with raises(RuntimeError): + await anext(doubles) + + @mark.asyncio + async def does_not_normally_map_over_externally_thrown_errors(): + async def source(): + yield 'Hello' + + doubles = MapAsyncIterator(source(), lambda x: x + x) + + assert await anext(doubles) == 'HelloHello' + + with raises(RuntimeError): + await doubles.athrow(RuntimeError('Goodbye')) + + @mark.asyncio + async def maps_over_thrown_errors_if_second_callback_provided(): + async def source(): + yield 'Hello' + raise RuntimeError('Goodbye') + + doubles = MapAsyncIterator( + source(), lambda x: x + x, lambda error: error) + + assert await anext(doubles) == 'HelloHello' + + result = await anext(doubles) + assert isinstance(result, RuntimeError) + assert str(result) == 'Goodbye' + + with raises(StopAsyncIteration): + await anext(doubles) diff --git a/tests/subscription/test_subscribe.py b/tests/subscription/test_subscribe.py new file mode 100644 index 00000000..a4c5d9a0 --- /dev/null +++ b/tests/subscription/test_subscribe.py @@ -0,0 +1,626 @@ +from pytest import mark, raises + +from graphql.language import parse +from graphql.pyutils import EventEmitter, EventEmitterAsyncIterator +from graphql.type import ( + GraphQLArgument, GraphQLBoolean, GraphQLField, GraphQLInt, GraphQLList, + GraphQLObjectType, GraphQLSchema, GraphQLString) +from graphql.subscription import subscribe + +EmailType = GraphQLObjectType('Email', { + 'from': GraphQLField(GraphQLString), + 'subject': GraphQLField(GraphQLString), + 'message': GraphQLField(GraphQLString), + 'unread': GraphQLField(GraphQLBoolean)}) + +InboxType = GraphQLObjectType('Inbox', { + 'total': GraphQLField( + GraphQLInt, resolve=lambda inbox, _info: len(inbox['emails'])), + 'unread': GraphQLField( + GraphQLInt, resolve=lambda inbox, _info: sum( + 1 for email in inbox['emails'] if email['unread'])), + 'emails': GraphQLField(GraphQLList(EmailType))}) + +QueryType = GraphQLObjectType('Query', {'inbox': GraphQLField(InboxType)}) + +EmailEventType = GraphQLObjectType('EmailEvent', { + 'email': GraphQLField(EmailType), + 'inbox': GraphQLField(InboxType)}) + + +try: + # noinspection PyUnresolvedReferences,PyUnboundLocalVariable + anext +except NameError: # anext does not yet exist in Python 3.6 + async def anext(iterable): + """Return the next item from an async iterator.""" + return await iterable.__anext__() + + +def email_schema_with_resolvers(subscribe_fn=None, resolve_fn=None): + return GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType('Subscription', { + 'importantEmail': GraphQLField( + EmailEventType, + args={'priority': GraphQLArgument(GraphQLInt)}, + resolve=resolve_fn, + subscribe=subscribe_fn)})) + + +email_schema = email_schema_with_resolvers() + + +async def create_subscription( + pubsub, schema: GraphQLSchema=email_schema, ast=None, variables=None): + data = { + 'inbox': { + 'emails': [{ + 'from': 'joe@graphql.org', + 'subject': 'Hello', + 'message': 'Hello World', + 'unread': False + }] + }, + 'importantEmail': lambda _info, priority=None: + EventEmitterAsyncIterator(pubsub, 'importantEmail') + } + + def send_important_email(new_email): + data['inbox']['emails'].append(new_email) + # Returns true if the event was consumed by a subscriber. 
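# The truthiness of pubsub.emit() relied on below is the behaviour shown by
# the EventEmitter tests earlier in this patch: emit() returns True only when
# at least one listener received the event. A minimal standalone sketch:

from graphql.pyutils import EventEmitter

emitter = EventEmitter()
assert emitter.emit('publish', 'payload') is False  # no listener registered yet
emitter.add_listener('publish', lambda value: None)
assert emitter.emit('publish', 'payload') is True  # consumed by the listener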
+ return pubsub.emit('importantEmail', { + 'importantEmail': { + 'email': new_email, + 'inbox': data['inbox']}}) + + default_ast = parse(""" + subscription ($priority: Int = 0) { + importantEmail(priority: $priority) { + email { + from + subject + } + inbox { + unread + total + } + } + } + """) + + # `subscribe` yields AsyncIterator or ExecutionResult + return send_important_email, await subscribe( + schema, ast or default_ast, data, variable_values=variables) + + +# Check all error cases when initializing the subscription. +def describe_subscription_initialization_phase(): + + @mark.asyncio + async def accepts_an_object_with_named_properties_as_arguments(): + document = parse(""" + subscription { + importantEmail + } + """) + + async def empty_async_iterator(_info): + for value in (): + yield value + + await subscribe( + email_schema, document, {'importantEmail': empty_async_iterator}) + + @mark.asyncio + async def accepts_multiple_subscription_fields_defined_in_schema(): + pubsub = EventEmitter() + SubscriptionTypeMultiple = GraphQLObjectType('Subscription', { + 'importantEmail': GraphQLField(EmailEventType), + 'nonImportantEmail': GraphQLField(EmailEventType)}) + + test_schema = GraphQLSchema( + query=QueryType, subscription=SubscriptionTypeMultiple) + + send_important_email, subscription = await create_subscription( + pubsub, test_schema) + + send_important_email({ + 'from': 'yuzhi@graphql.org', + 'subject': 'Alright', + 'message': 'Tests are good', + 'unread': True}) + + await anext(subscription) + + @mark.asyncio + async def accepts_type_definition_with_sync_subscribe_function(): + pubsub = EventEmitter() + + def subscribe_email(_inbox, _info): + return EventEmitterAsyncIterator(pubsub, 'importantEmail') + + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType('Subscription', { + 'importantEmail': GraphQLField( + GraphQLString, subscribe=subscribe_email)})) + + ast = parse(""" + subscription { + importantEmail + } + """) + + subscription = await subscribe(schema, ast) + + pubsub.emit('importantEmail', {'importantEmail': {}}) + + await anext(subscription) + + @mark.asyncio + async def accepts_type_definition_with_async_subscribe_function(): + pubsub = EventEmitter() + + async def subscribe_email(_inbox, _info): + return EventEmitterAsyncIterator(pubsub, 'importantEmail') + + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType('Subscription', { + 'importantEmail': GraphQLField( + GraphQLString, subscribe=subscribe_email)})) + + ast = parse(""" + subscription { + importantEmail + } + """) + + subscription = await subscribe(schema, ast) + + pubsub.emit('importantEmail', {'importantEmail': {}}) + + await anext(subscription) + + @mark.asyncio + async def should_only_resolve_the_first_field_of_invalid_multi_field(): + did_resolve = {'importantEmail': False, 'nonImportantEmail': False} + + def subscribe_important(_inbox, _info): + did_resolve['importantEmail'] = True + return EventEmitterAsyncIterator(EventEmitter(), 'event') + + def subscribe_non_important(_inbox, _info): + did_resolve['nonImportantEmail'] = True + return EventEmitterAsyncIterator(EventEmitter(), 'event') + + SubscriptionTypeMultiple = GraphQLObjectType('Subscription', { + 'importantEmail': GraphQLField( + EmailEventType, subscribe=subscribe_important), + 'nonImportantEmail': GraphQLField( + EmailEventType, subscribe=subscribe_non_important)}) + + test_schema = GraphQLSchema( + query=QueryType, subscription=SubscriptionTypeMultiple) + + ast = parse(""" + subscription { + 
importantEmail + nonImportantEmail + } + """) + + subscription = await subscribe(test_schema, ast) + ignored = anext(subscription) # Ask for a result, but ignore it. + + assert did_resolve['importantEmail'] is True + assert did_resolve['nonImportantEmail'] is False + + # Close subscription + # noinspection PyUnresolvedReferences + await subscription.aclose() + + with raises(StopAsyncIteration): + await ignored + + # noinspection PyArgumentList + @mark.asyncio + async def throws_an_error_if_schema_is_missing(): + document = parse(""" + subscription { + importantEmail + } + """) + + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + await subscribe(None, document) + + assert str(exc_info.value) == 'Expected None to be a GraphQL schema.' + + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + await subscribe(document=document) + + msg = str(exc_info.value) + assert 'missing' in msg and "argument: 'schema'" in msg + + # noinspection PyArgumentList + @mark.asyncio + async def throws_an_error_if_document_is_missing(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + await subscribe(email_schema, None) + + assert str(exc_info.value) == 'Must provide document' + + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + await subscribe(schema=email_schema) + + msg = str(exc_info.value) + assert 'missing' in msg and "argument: 'document'" in msg + + @mark.asyncio + async def resolves_to_an_error_for_unknown_subscription_field(): + ast = parse(""" + subscription { + unknownField + } + """) + + pubsub = EventEmitter() + + subscription = (await create_subscription(pubsub, ast=ast))[1] + + assert subscription == (None, [{ + 'message': "The subscription field 'unknownField' is not defined.", + 'locations': [(3, 15)]}]) + + @mark.asyncio + async def throws_an_error_if_subscribe_does_not_return_an_iterator(): + invalid_email_schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType('Subscription', { + 'importantEmail': GraphQLField( + GraphQLString, subscribe=lambda _inbox, _info: 'test')})) + + pubsub = EventEmitter() + + with raises(TypeError) as exc_info: + await create_subscription(pubsub, invalid_email_schema) + + assert str(exc_info.value) == ( + "Subscription field must return AsyncIterable. 
Received: 'test'") + + @mark.asyncio + async def resolves_to_an_error_for_subscription_resolver_errors(): + + async def test_reports_error(schema): + result = await subscribe( + schema, + parse(""" + subscription { + importantEmail + } + """)) + + assert result == (None, [{ + 'message': 'test error', + 'locations': [(3, 23)], 'path': ['importantEmail']}]) + + # Returning an error + def return_error(*args): + return TypeError('test error') + + subscription_returning_error_schema = email_schema_with_resolvers( + return_error) + await test_reports_error(subscription_returning_error_schema) + + # Throwing an error + def throw_error(*args): + raise TypeError('test error') + + subscription_throwing_error_schema = email_schema_with_resolvers( + throw_error) + await test_reports_error(subscription_throwing_error_schema) + + # Resolving to an error + async def resolve_error(*args): + return TypeError('test error') + + subscription_resolving_error_schema = email_schema_with_resolvers( + resolve_error) + await test_reports_error(subscription_resolving_error_schema) + + # Rejecting with an error + async def reject_error(*args): + return TypeError('test error') + + subscription_rejecting_error_schema = email_schema_with_resolvers( + reject_error) + await test_reports_error(subscription_rejecting_error_schema) + + @mark.asyncio + async def resolves_to_an_error_if_variables_were_wrong_type(): + # If we receive variables that cannot be coerced correctly, subscribe() + # will resolve to an ExecutionResult that contains an informative error + # description. + ast = parse(""" + subscription ($priority: Int) { + importantEmail(priority: $priority) { + email { + from + subject + } + inbox { + unread + total + } + } + } + """) + + pubsub = EventEmitter() + data = { + 'inbox': { + 'emails': [{ + 'from': 'joe@graphql.org', + 'subject': 'Hello', + 'message': 'Hello World', + 'unread': False + }] + }, + 'importantEmail': lambda _info: EventEmitterAsyncIterator( + pubsub, 'importantEmail')} + + result = await subscribe( + email_schema, ast, data, variable_values={'priority': 'meow'}) + + assert result == (None, [{ + 'message': + "Variable '$priority' got invalid value 'meow'; Expected" + " type Int; Int cannot represent non-integer value: 'meow'", + 'locations': [(2, 27)]}]) + + assert result.errors[0].original_error is not None + + +# Once a subscription returns a valid AsyncIterator, it can still yield errors. +def describe_subscription_publish_phase(): + + @mark.asyncio + async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): + pubsub = EventEmitter() + send_important_email, subscription = await create_subscription(pubsub) + second = await create_subscription(pubsub) + + payload1 = anext(subscription) + payload2 = anext(second[1]) + + assert send_important_email({ + 'from': 'yuzhi@graphql.org', + 'subject': 'Alright', + 'message': 'Tests are good', + 'unread': True}) is True + + expected_payload = { + 'importantEmail': { + 'email': { + 'from': 'yuzhi@graphql.org', + 'subject': 'Alright' + }, + 'inbox': { + 'unread': 1, + 'total': 2 + }, + } + } + + assert await payload1 == (expected_payload, None) + assert await payload2 == (expected_payload, None) + + @mark.asyncio + async def produces_a_payload_per_subscription_event(): + pubsub = EventEmitter() + send_important_email, subscription = await create_subscription(pubsub) + + # Wait for the next subscription payload. + payload = anext(subscription) + + # A new email arrives! 
+        assert send_important_email({
+            'from': 'yuzhi@graphql.org',
+            'subject': 'Alright',
+            'message': 'Tests are good',
+            'unread': True}) is True
+
+        # The previously waited on payload now has a value.
+        assert await payload == ({
+            'importantEmail': {
+                'email': {
+                    'from': 'yuzhi@graphql.org',
+                    'subject': 'Alright'
+                },
+                'inbox': {
+                    'unread': 1,
+                    'total': 2
+                },
+            }
+        }, None)
+
+        # Another new email arrives, before subscription.__anext__ is called.
+        assert send_important_email({
+            'from': 'hyo@graphql.org',
+            'subject': 'Tools',
+            'message': 'I <3 making things',
+            'unread': True}) is True
+
+        # The next waited on payload will have a value.
+        assert await anext(subscription) == ({
+            'importantEmail': {
+                'email': {
+                    'from': 'hyo@graphql.org',
+                    'subject': 'Tools'
+                },
+                'inbox': {
+                    'unread': 2,
+                    'total': 3
+                },
+            }
+        }, None)
+
+        # The client decides to disconnect.
+        # noinspection PyUnresolvedReferences
+        await subscription.aclose()
+
+        # Which may result in disconnecting upstream services as well.
+        assert send_important_email({
+            'from': 'adam@graphql.org',
+            'subject': 'Important',
+            'message': 'Read me please',
+            'unread': True}) is False  # No more listeners.
+
+        # Awaiting the subscription after closing it raises StopAsyncIteration.
+        with raises(StopAsyncIteration):
+            assert await anext(subscription)
+
+    @mark.asyncio
+    async def event_order_is_correct_for_multiple_publishes():
+        pubsub = EventEmitter()
+        send_important_email, subscription = await create_subscription(pubsub)
+
+        payload = anext(subscription)
+
+        # A new email arrives!
+        assert send_important_email({
+            'from': 'yuzhi@graphql.org',
+            'subject': 'Message',
+            'message': 'Tests are good',
+            'unread': True}) is True
+
+        # A new email arrives!
+        assert send_important_email({
+            'from': 'yuzhi@graphql.org',
+            'subject': 'Message 2',
+            'message': 'Tests are good 2',
+            'unread': True}) is True
+
+        assert await payload == ({
+            'importantEmail': {
+                'email': {
+                    'from': 'yuzhi@graphql.org',
+                    'subject': 'Message'
+                },
+                'inbox': {
+                    'unread': 2,
+                    'total': 3
+                },
+            }
+        }, None)
+
+        payload = anext(subscription)
+
+        assert await payload == ({
+            'importantEmail': {
+                'email': {
+                    'from': 'yuzhi@graphql.org',
+                    'subject': 'Message 2'
+                },
+                'inbox': {
+                    'unread': 2,
+                    'total': 3
+                },
+            }
+        }, None)
+
+    @mark.asyncio
+    async def should_handle_error_during_execution_of_source_event():
+        async def subscribe_fn(_event, _info):
+            yield {'email': {'subject': 'Hello'}}
+            yield {'email': {'subject': 'Goodbye'}}
+            yield {'email': {'subject': 'Bonjour'}}
+
+        def resolve_fn(event, _info):
+            if event['email']['subject'] == 'Goodbye':
+                raise RuntimeError('Never leave')
+            return event
+
+        erroring_email_schema = email_schema_with_resolvers(
+            subscribe_fn, resolve_fn)
+
+        subscription = await subscribe(erroring_email_schema, parse("""
+            subscription {
+              importantEmail {
+                email {
+                  subject
+                }
+              }
+            }
+            """))
+
+        payload1 = await anext(subscription)
+        assert payload1 == ({
+            'importantEmail': {
+                'email': {
+                    'subject': 'Hello'
+                },
+            },
+        }, None)
+
+        # An error in execution is presented as such.
+        payload2 = await anext(subscription)
+        assert payload2 == ({'importantEmail': None}, [{
+            'message': 'Never leave',
+            'locations': [(3, 15)], 'path': ['importantEmail']}])
+
+        # However that does not close the response event stream. Subsequent
+        # events are still executed.
+ payload3 = await anext(subscription) + assert payload3 == ({ + 'importantEmail': { + 'email': { + 'subject': 'Bonjour' + }, + }, + }, None) + + @mark.asyncio + async def should_pass_through_error_thrown_in_source_event_stream(): + async def subscribe_fn(_event, _info): + yield {'email': {'subject': 'Hello'}} + raise RuntimeError('test error') + + def resolve_fn(event, _info): + return event + + erroring_email_schema = email_schema_with_resolvers( + subscribe_fn, resolve_fn) + + subscription = await subscribe(erroring_email_schema, parse(""" + subscription { + importantEmail { + email { + subject + } + } + } + """)) + + payload1 = await anext(subscription) + assert payload1 == ({ + 'importantEmail': { + 'email': { + 'subject': 'Hello' + } + } + }, None) + + with raises(RuntimeError) as exc_info: + await anext(subscription) + + assert str(exc_info.value) == 'test error' + + with raises(StopAsyncIteration): + await anext(subscription) diff --git a/tests/test_star_wars_introspection.py b/tests/test_star_wars_introspection.py new file mode 100644 index 00000000..24ec482c --- /dev/null +++ b/tests/test_star_wars_introspection.py @@ -0,0 +1,367 @@ +from graphql import graphql_sync + +from .star_wars_schema import star_wars_schema + + +def describe_star_wars_introspection_tests(): + + def describe_basic_introspection(): + + def allows_querying_the_schema_for_types(): + query = """ + query IntrospectionTypeQuery { + __schema { + types { + name + } + } + } + """ + expected = { + '__schema': { + 'types': [{ + 'name': 'Query' + }, { + 'name': 'Episode' + }, { + 'name': 'Character' + }, { + 'name': 'String' + }, { + 'name': 'Human' + }, { + 'name': 'Droid' + }, { + 'name': '__Schema' + }, { + 'name': '__Type' + }, { + 'name': '__TypeKind' + }, { + 'name': 'Boolean' + }, { + 'name': '__Field' + }, { + 'name': '__InputValue' + }, { + 'name': '__EnumValue' + }, { + 'name': '__Directive' + }, { + 'name': '__DirectiveLocation' + }] + } + } + + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) + + def allows_querying_the_schema_for_query_type(): + query = """ + query IntrospectionQueryTypeQuery { + __schema { + queryType { + name + } + } + } + """ + expected = { + '__schema': { + 'queryType': { + 'name': 'Query' + } + } + } + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) + + def allows_querying_the_schema_for_a_specific_type(): + query = """ + query IntrospectionDroidTypeQuery { + __type(name: "Droid") { + name + } + } + """ + expected = { + '__type': { + 'name': 'Droid' + } + } + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) + + def allows_querying_the_schema_for_an_object_kind(): + query = """ + query IntrospectionDroidKindQuery { + __type(name: "Droid") { + name + kind + } + } + """ + expected = { + '__type': { + 'name': 'Droid', + 'kind': 'OBJECT' + } + } + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) + + def allows_querying_the_schema_for_an_interface_kind(): + query = """ + query IntrospectionCharacterKindQuery { + __type(name: "Character") { + name + kind + } + } + """ + expected = { + '__type': { + 'name': 'Character', + 'kind': 'INTERFACE' + } + } + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) + + def allows_querying_the_schema_for_object_fields(): + query = """ + query IntrospectionDroidFieldsQuery { + __type(name: "Droid") { + name + fields { + name + type { + name + kind + } + } + } + } + """ + expected = { + 
'__type': { + 'name': 'Droid', + 'fields': [{ + 'name': 'id', + 'type': { + 'name': None, + 'kind': 'NON_NULL' + } + }, { + 'name': 'name', + 'type': { + 'name': 'String', + 'kind': 'SCALAR' + } + }, { + 'name': 'friends', + 'type': { + 'name': None, + 'kind': 'LIST' + } + }, { + 'name': 'appearsIn', + 'type': { + 'name': None, + 'kind': 'LIST' + } + }, { + 'name': 'secretBackstory', + 'type': { + 'name': 'String', + 'kind': 'SCALAR' + } + }, { + 'name': 'primaryFunction', + 'type': { + 'name': 'String', + 'kind': 'SCALAR' + } + }] + } + } + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) + + def allows_querying_the_schema_for_nested_object_fields(): + query = """ + query IntrospectionDroidNestedFieldsQuery { + __type(name: "Droid") { + name + fields { + name + type { + name + kind + ofType { + name + kind + } + } + } + } + } + """ + expected = { + '__type': { + 'name': 'Droid', + 'fields': [{ + 'name': 'id', + 'type': { + 'name': None, + 'kind': 'NON_NULL', + 'ofType': { + 'name': 'String', + 'kind': 'SCALAR' + } + } + }, { + 'name': 'name', + 'type': { + 'name': 'String', + 'kind': 'SCALAR', + 'ofType': None + } + }, { + 'name': 'friends', + 'type': { + 'name': None, + 'kind': 'LIST', + 'ofType': { + 'name': 'Character', + 'kind': 'INTERFACE' + } + } + }, { + 'name': 'appearsIn', + 'type': { + 'name': None, + 'kind': 'LIST', + 'ofType': { + 'name': 'Episode', + 'kind': 'ENUM' + } + } + }, { + 'name': 'secretBackstory', + 'type': { + 'name': 'String', + 'kind': 'SCALAR', + 'ofType': None + } + }, { + 'name': 'primaryFunction', + 'type': { + 'name': 'String', + 'kind': 'SCALAR', + 'ofType': None + } + }] + } + } + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) + + def allows_querying_the_schema_for_field_args(): + query = """ + query IntrospectionQueryTypeQuery { + __schema { + queryType { + fields { + name + args { + name + description + type { + name + kind + ofType { + name + kind + } + } + defaultValue + } + } + } + } + } + """ + expected = { + '__schema': { + 'queryType': { + 'fields': [{ + 'name': 'hero', + 'args': [{ + 'defaultValue': None, + 'description': + 'If omitted, returns the hero of the whole' + ' saga. If provided, returns the hero of' + ' that particular episode.', + 'name': 'episode', + 'type': { + 'kind': 'ENUM', + 'name': 'Episode', + 'ofType': None + } + }] + }, { + 'name': 'human', + 'args': [{ + 'name': 'id', + 'description': 'id of the human', + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'String' + } + }, + 'defaultValue': None + }] + }, { + 'name': 'droid', + 'args': [{ + 'name': 'id', + 'description': 'id of the droid', + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'String' + } + }, + 'defaultValue': None + }] + }] + } + } + } + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) + + def allows_querying_the_schema_for_documentation(): + query = """ + query IntrospectionDroidDescriptionQuery { + __type(name: "Droid") { + name + description + } + } + """ + expected = { + '__type': { + 'name': 'Droid', + 'description': + 'A mechanical creature in the Star Wars universe.' 
+ } + } + result = graphql_sync(star_wars_schema, query) + assert result == (expected, None) diff --git a/tests/test_star_wars_query.py b/tests/test_star_wars_query.py new file mode 100644 index 00000000..4a3479ee --- /dev/null +++ b/tests/test_star_wars_query.py @@ -0,0 +1,422 @@ +from pytest import mark + +from graphql import graphql + +from .star_wars_schema import star_wars_schema + + +def describe_star_wars_query_tests(): + + def describe_basic_queries(): + + @mark.asyncio + async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): + query = """ + query HeroNameQuery { + hero { + name + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({'hero': {'name': 'R2-D2'}}, None) + + @mark.asyncio + async def accepts_an_object_with_named_properties_to_graphql(): + query = """ + query HeroNameQuery { + hero { + name + } + } + """ + result = await graphql(schema=star_wars_schema, source=query) + assert result == ({'hero': {'name': 'R2-D2'}}, None) + + @mark.asyncio + async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): + query = """ + query HeroNameAndFriendsQuery { + hero { + id + name + friends { + name + } + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'hero': { + 'id': '2001', + 'name': 'R2-D2', + 'friends': [ + {'name': 'Luke Skywalker'}, + {'name': 'Han Solo'}, + {'name': 'Leia Organa'}, + ] + } + }, None) + + def describe_nested_queries(): + + @mark.asyncio + async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): + query = """ + query NestedQuery { + hero { + name + friends { + name + appearsIn + friends { + name + } + } + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'hero': { + 'name': 'R2-D2', + 'friends': [ + { + 'name': 'Luke Skywalker', + 'appearsIn': ['NEWHOPE', 'EMPIRE', 'JEDI'], + 'friends': [ + { + 'name': 'Han Solo', + }, + { + 'name': 'Leia Organa', + }, + { + 'name': 'C-3PO', + }, + { + 'name': 'R2-D2', + }, + ] + }, + { + 'name': 'Han Solo', + 'appearsIn': ['NEWHOPE', 'EMPIRE', 'JEDI'], + 'friends': [ + { + 'name': 'Luke Skywalker', + }, + { + 'name': 'Leia Organa', + }, + { + 'name': 'R2-D2', + }, + ] + }, + { + 'name': 'Leia Organa', + 'appearsIn': ['NEWHOPE', 'EMPIRE', 'JEDI'], + 'friends': [ + { + 'name': 'Luke Skywalker', + }, + { + 'name': 'Han Solo', + }, + { + 'name': 'C-3PO', + }, + { + 'name': 'R2-D2', + }, + ] + }, + ] + } + }, None) + + def describe_using_ids_and_query_parameters_to_refetch_objects(): + + @mark.asyncio + async def allows_us_to_query_for_r2_d2_directly_using_his_id(): + query = """ + query { + droid(id: "2001") { + name + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({'droid': {'name': 'R2-D2'}}, None) + + @mark.asyncio + async def allows_us_to_query_for_luke_directly_using_his_id(): + query = """ + query FetchLukeQuery { + human(id: "1000") { + name + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({'human': {'name': 'Luke Skywalker'}}, None) + + @mark.asyncio + async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): + query = """ + query FetchSomeIDQuery($someId: String!) 
{ + human(id: $someId) { + name + } + } + """ + params = {'someId': '1000'} + result = await graphql(star_wars_schema, query, + variable_values=params) + assert result == ({'human': {'name': 'Luke Skywalker'}}, None) + + @mark.asyncio + async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): + query = """ + query FetchSomeIDQuery($someId: String!) { + human(id: $someId) { + name + } + } + """ + params = {'someId': '1002'} + result = await graphql(star_wars_schema, query, + variable_values=params) + assert result == ({'human': {'name': 'Han Solo'}}, None) + + @mark.asyncio + async def generic_query_that_gets_null_back_when_passed_invalid_id(): + query = """ + query humanQuery($id: String!) { + human(id: $id) { + name + } + } + """ + params = {'id': 'not a valid id'} + result = await graphql(star_wars_schema, query, + variable_values=params) + assert result == ({'human': None}, None) + + def describe_using_aliases_to_change_the_key_in_the_response(): + + @mark.asyncio + async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): + query = """ + query FetchLukeAliased { + luke: human(id: "1000") { + name + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({'luke': {'name': 'Luke Skywalker'}}, None) + + @mark.asyncio + async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): + query = """ + query FetchLukeAndLeiaAliased { + luke: human(id: "1000") { + name + } + leia: human(id: "1003") { + name + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'luke': { + 'name': 'Luke Skywalker', + }, + 'leia': { + 'name': 'Leia Organa', + } + }, None) + + def describe_uses_fragments_to_express_more_complex_queries(): + + @mark.asyncio + async def allows_us_to_query_using_duplicated_content(): + query = """ + query DuplicateFields { + luke: human(id: "1000") { + name + homePlanet + } + leia: human(id: "1003") { + name + homePlanet + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'luke': { + 'name': 'Luke Skywalker', + 'homePlanet': 'Tatooine', + }, + 'leia': { + 'name': 'Leia Organa', + 'homePlanet': 'Alderaan', + } + }, None) + + @mark.asyncio + async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): + query = """ + query UseFragment { + luke: human(id: "1000") { + ...HumanFragment + } + leia: human(id: "1003") { + ...HumanFragment + } + } + fragment HumanFragment on Human { + name + homePlanet + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'luke': { + 'name': 'Luke Skywalker', + 'homePlanet': 'Tatooine', + }, + 'leia': { + 'name': 'Leia Organa', + 'homePlanet': 'Alderaan', + } + }, None) + + def describe_using_typename_to_find_the_type_of_an_object(): + + @mark.asyncio + async def allows_us_to_verify_that_r2_d2_is_a_droid(): + query = """ + query CheckTypeOfR2 { + hero { + __typename + name + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'hero': { + '__typename': 'Droid', + 'name': 'R2-D2', + } + }, None) + + @mark.asyncio + async def allows_us_to_verify_that_luke_is_a_human(): + query = """ + query CheckTypeOfLuke { + hero(episode: EMPIRE) { + __typename + name + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'hero': { + '__typename': 'Human', + 'name': 'Luke Skywalker', + } + }, None) + + def describe_reporting_errors_raised_in_resolvers(): + + @mark.asyncio + async def correctly_reports_error_on_accessing_secret_backstory(): + 
query = """ + query HeroNameQuery { + hero { + name + secretBackstory + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'hero': { + 'name': 'R2-D2', + 'secretBackstory': None + } + }, [{ + 'message': 'secretBackstory is secret.', + 'locations': [(5, 21)], 'path': ['hero', 'secretBackstory'] + }]) + + @mark.asyncio + async def correctly_reports_error_on_accessing_backstory_in_a_list(): + query = """ + query HeroNameQuery { + hero { + name + friends { + name + secretBackstory + } + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'hero': { + 'name': 'R2-D2', + 'friends': [{ + 'name': 'Luke Skywalker', + 'secretBackstory': None + }, { + 'name': 'Han Solo', + 'secretBackstory': None + }, { + 'name': 'Leia Organa', + 'secretBackstory': None + }], + } + }, [{ + 'message': 'secretBackstory is secret.', + 'locations': [(7, 23)], + 'path': ['hero', 'friends', 0, 'secretBackstory'] + }, { + 'message': 'secretBackstory is secret.', + 'locations': [(7, 23)], + 'path': ['hero', 'friends', 1, 'secretBackstory'] + }, { + 'message': 'secretBackstory is secret.', + 'locations': [(7, 23)], + 'path': ['hero', 'friends', 2, 'secretBackstory'] + }]) + + @mark.asyncio + async def correctly_reports_error_on_accessing_through_an_alias(): + query = """ + query HeroNameQuery { + mainHero: hero { + name + story: secretBackstory + } + } + """ + result = await graphql(star_wars_schema, query) + assert result == ({ + 'mainHero': { + 'name': 'R2-D2', + 'story': None + } + }, [{ + 'message': 'secretBackstory is secret.', + 'locations': [(5, 21)], 'path': ['mainHero', 'story'] + }]) diff --git a/tests/test_star_wars_validation.py b/tests/test_star_wars_validation.py new file mode 100644 index 00000000..7c630151 --- /dev/null +++ b/tests/test_star_wars_validation.py @@ -0,0 +1,108 @@ +from graphql.language import parse, Source +from graphql.validation import validate + +from .star_wars_schema import star_wars_schema + + +def validation_errors(query): + """Helper function to test a query and the expected response.""" + source = Source(query, 'StarWars.graphql') + ast = parse(source) + return validate(star_wars_schema, ast) + + +def describe_star_wars_validation_tests(): + + def describe_basic_queries(): + + def validates_a_complex_but_valid_query(): + query = """ + query NestedQueryWithFragment { + hero { + ...NameAndAppearances + friends { + ...NameAndAppearances + friends { + ...NameAndAppearances + } + } + } + } + + fragment NameAndAppearances on Character { + name + appearsIn + } + """ + assert not validation_errors(query) + + def notes_that_non_existent_fields_are_invalid(): + query = """ + query HeroSpaceshipQuery { + hero { + favoriteSpaceship + } + } + """ + assert validation_errors(query) + + def requires_fields_on_object(): + query = """ + query HeroNoFieldsQuery { + hero + } + """ + assert validation_errors(query) + + def disallows_fields_on_scalars(): + query = """ + query HeroFieldsOnScalarQuery { + hero { + name { + firstCharacterOfName + } + } + } + """ + assert validation_errors(query) + + def disallows_object_fields_on_interfaces(): + query = """ + query DroidFieldOnCharacter { + hero { + name + primaryFunction + } + } + """ + assert validation_errors(query) + + def allows_object_fields_in_fragments(): + query = """ + query DroidFieldInFragment { + hero { + name + ...DroidFields + } + } + + fragment DroidFields on Droid { + primaryFunction + } + """ + assert not validation_errors(query) + + def 
allows_object_fields_in_inline_fragments():
+            query = """
+                query DroidFieldInFragment {
+                  hero {
+                    name
+                    ... on Droid {
+                      primaryFunction
+                    }
+                  }
+                }
+                """
+            assert not validation_errors(query)
diff --git a/tests/type/__init__.py b/tests/type/__init__.py
new file mode 100644
index 00000000..aaa6fa28
--- /dev/null
+++ b/tests/type/__init__.py
@@ -0,0 +1 @@
+"""Tests for graphql.type"""
diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py
new file mode 100644
index 00000000..5301b88b
--- /dev/null
+++ b/tests/type/test_definition.py
@@ -0,0 +1,821 @@
+from typing import cast, Dict
+
+from pytest import fixture, mark, raises
+
+from graphql.error import INVALID
+from graphql.type import (
+    GraphQLArgument, GraphQLBoolean, GraphQLField,
+    GraphQLInt, GraphQLString, GraphQLObjectType, GraphQLList,
+    GraphQLScalarType, GraphQLInterfaceType, GraphQLUnionType,
+    GraphQLEnumType, GraphQLEnumValue, GraphQLInputObjectType, GraphQLSchema,
+    GraphQLOutputType, GraphQLInputField, GraphQLNonNull, is_input_type,
+    is_output_type)
+
+
+BlogImage = GraphQLObjectType('Image', {
+    'url': GraphQLField(GraphQLString),
+    'width': GraphQLField(GraphQLInt),
+    'height': GraphQLField(GraphQLInt)})
+
+
+BlogAuthor = GraphQLObjectType('Author', lambda: {
+    'id': GraphQLField(GraphQLString),
+    'name': GraphQLField(GraphQLString),
+    'pic': GraphQLField(
+        BlogImage,
+        args={
+            'width': GraphQLArgument(GraphQLInt),
+            'height': GraphQLArgument(GraphQLInt),
+        }),
+    'recentArticle': GraphQLField(BlogArticle)})
+
+
+BlogArticle = GraphQLObjectType('Article', lambda: {
+    'id': GraphQLField(GraphQLString),
+    'isPublished': GraphQLField(GraphQLBoolean),
+    'author': GraphQLField(BlogAuthor),
+    'title': GraphQLField(GraphQLString),
+    'body': GraphQLField(GraphQLString)})
+
+
+BlogQuery = GraphQLObjectType('Query', {
+    'article': GraphQLField(
+        BlogArticle,
+        args={
+            'id': GraphQLArgument(GraphQLString),
+        }),
+    'feed': GraphQLField(GraphQLList(BlogArticle))})
+
+
+BlogMutation = GraphQLObjectType('Mutation', {
+    'writeArticle': GraphQLField(BlogArticle)})
+
+
+BlogSubscription = GraphQLObjectType('Subscription', {
+    'articleSubscribe': GraphQLField(
+        args={'id': GraphQLArgument(GraphQLString)},
+        type_=BlogArticle
+    )
+})
+
+ObjectType = GraphQLObjectType('Object', {})
+InterfaceType = GraphQLInterfaceType('Interface')
+UnionType = GraphQLUnionType('Union', [ObjectType], resolve_type=lambda: None)
+EnumType = GraphQLEnumType('Enum', {'foo': GraphQLEnumValue()})
+InputObjectType = GraphQLInputObjectType('InputObject', {})
+ScalarType = GraphQLScalarType(
+    'Scalar', serialize=lambda: None,
+    parse_value=lambda: None, parse_literal=lambda: None)
+
+
+def schema_with_field_type(type_: GraphQLOutputType) -> GraphQLSchema:
+    return GraphQLSchema(
+        query=GraphQLObjectType('Query', {'field': GraphQLField(type_)}),
+        types=[type_])
+
+
+def describe_type_system_example():
+
+    def defines_a_query_only_schema():
+        BlogSchema = GraphQLSchema(BlogQuery)
+
+        assert BlogSchema.query_type == BlogQuery
+
+        article_field = BlogQuery.fields['article']
+        assert article_field.type == BlogArticle
+        assert article_field.type.name == 'Article'
+
+        article_field_type = article_field.type
+        assert isinstance(article_field_type, GraphQLObjectType)
+
+        title_field = article_field_type.fields['title']
+        assert title_field.type == GraphQLString
+        assert title_field.type.name == 'String'
+
+        author_field = article_field_type.fields['author']
+
+        author_field_type = author_field.type
+        assert 
isinstance(author_field_type, GraphQLObjectType) + recent_article_field = author_field_type.fields['recentArticle'] + + assert recent_article_field.type == BlogArticle + + feed_field = BlogQuery.fields['feed'] + assert feed_field.type.of_type == BlogArticle + + def defines_a_mutation_schema(): + BlogSchema = GraphQLSchema( + query=BlogQuery, + mutation=BlogMutation) + + assert BlogSchema.mutation_type == BlogMutation + + write_mutation = BlogMutation.fields['writeArticle'] + assert write_mutation.type == BlogArticle + assert write_mutation.type.name == 'Article' + + def defines_a_subscription_schema(): + BlogSchema = GraphQLSchema( + query=BlogQuery, + subscription=BlogSubscription) + + assert BlogSchema.subscription_type == BlogSubscription + + subscription = BlogSubscription.fields['articleSubscribe'] + assert subscription.type == BlogArticle + assert subscription.type.name == 'Article' + + def defines_an_enum_type_with_deprecated_value(): + EnumTypeWithDeprecatedValue = GraphQLEnumType( + name='EnumWithDeprecatedValue', + values={'foo': GraphQLEnumValue( + deprecation_reason='Just because')}) + + deprecated_value = EnumTypeWithDeprecatedValue.values['foo'] + assert deprecated_value == GraphQLEnumValue( + deprecation_reason='Just because') + assert deprecated_value.is_deprecated is True + assert deprecated_value.deprecation_reason == 'Just because' + assert deprecated_value.value is None + assert deprecated_value.ast_node is None + + def defines_an_enum_type_with_a_value_of_none_and_invalid(): + EnumTypeWithNullishValue = GraphQLEnumType( + name='EnumWithNullishValue', + values={'NULL': None, 'UNDEFINED': INVALID}) + + assert EnumTypeWithNullishValue.values == { + 'NULL': GraphQLEnumValue(), + 'UNDEFINED': GraphQLEnumValue(INVALID)} + null_value = EnumTypeWithNullishValue.values['NULL'] + assert null_value.description is None + assert null_value.is_deprecated is False + assert null_value.deprecation_reason is None + assert null_value.value is None + assert null_value.ast_node is None + undefined_value = EnumTypeWithNullishValue.values['UNDEFINED'] + assert undefined_value.description is None + assert undefined_value.is_deprecated is False + assert undefined_value.deprecation_reason is None + assert undefined_value.value is INVALID + assert undefined_value.ast_node is None + + def defines_an_object_type_with_deprecated_field(): + TypeWithDeprecatedField = GraphQLObjectType('foo', { + 'bar': GraphQLField(GraphQLString, + deprecation_reason='A terrible reason')}) + + deprecated_field = TypeWithDeprecatedField.fields['bar'] + assert deprecated_field == GraphQLField( + GraphQLString, deprecation_reason='A terrible reason') + assert deprecated_field.is_deprecated is True + assert deprecated_field.deprecation_reason == 'A terrible reason' + assert deprecated_field.type is GraphQLString + assert deprecated_field.args == {} + + def includes_nested_input_objects_in_the_map(): + NestedInputObject = GraphQLInputObjectType('NestedInputObject', { + 'value': GraphQLInputField(GraphQLString)}) + SomeInputObject = GraphQLInputObjectType('SomeInputObject', { + 'nested': GraphQLInputField(NestedInputObject)}) + SomeMutation = GraphQLObjectType('SomeMutation', { + 'mutateSomething': GraphQLField(BlogArticle, { + 'input': GraphQLArgument(SomeInputObject)})}) + SomeSubscription = GraphQLObjectType('SomeSubscription', { + 'subscribeToSomething': GraphQLField(BlogArticle, { + 'input': GraphQLArgument(SomeInputObject)})}) + schema = GraphQLSchema( + query=BlogQuery, + mutation=SomeMutation, + 
subscription=SomeSubscription) + assert schema.type_map['NestedInputObject'] is NestedInputObject + + def includes_interface_possible_types_in_the_type_map(): + SomeInterface = GraphQLInterfaceType('SomeInterface', { + 'f': GraphQLField(GraphQLInt)}) + SomeSubtype = GraphQLObjectType('SomeSubtype', { + 'f': GraphQLField(GraphQLInt)}, + interfaces=[SomeInterface]) + schema = GraphQLSchema( + query=GraphQLObjectType('Query', { + 'iface': GraphQLField(SomeInterface)}), + types=[SomeSubtype]) + assert schema.type_map['SomeSubtype'] is SomeSubtype + + def includes_interfaces_thunk_subtypes_in_the_type_map(): + SomeInterface = GraphQLInterfaceType('SomeInterface', { + 'f': GraphQLField(GraphQLInt)}) + SomeSubtype = GraphQLObjectType('SomeSubtype', { + 'f': GraphQLField(GraphQLInt)}, + interfaces=lambda: [SomeInterface]) + schema = GraphQLSchema( + query=GraphQLObjectType('Query', { + 'iface': GraphQLField(SomeInterface)}), + types=[SomeSubtype]) + assert schema.type_map['SomeSubtype'] is SomeSubtype + + def stringifies_simple_types(): + assert str(GraphQLInt) == 'Int' + assert str(BlogArticle) == 'Article' + assert str(InterfaceType) == 'Interface' + assert str(UnionType) == 'Union' + assert str(EnumType) == 'Enum' + assert str(InputObjectType) == 'InputObject' + assert str(GraphQLNonNull(GraphQLInt)) == 'Int!' + assert str(GraphQLList(GraphQLInt)) == '[Int]' + assert str(GraphQLNonNull(GraphQLList(GraphQLInt))) == '[Int]!' + assert str(GraphQLList(GraphQLNonNull(GraphQLInt))) == '[Int!]' + assert str(GraphQLList(GraphQLList(GraphQLInt))) == '[[Int]]' + + def identifies_input_types(): + expected = ( + (GraphQLInt, True), + (ObjectType, False), + (InterfaceType, False), + (UnionType, False), + (EnumType, True), + (InputObjectType, True)) + + for type_, answer in expected: + assert is_input_type(type_) is answer + assert is_input_type(GraphQLList(type_)) is answer + assert is_input_type(GraphQLNonNull(type_)) is answer + + def identifies_output_types(): + expected = ( + (GraphQLInt, True), + (ObjectType, True), + (InterfaceType, True), + (UnionType, True), + (EnumType, True), + (InputObjectType, False)) + + for type_, answer in expected: + assert is_output_type(type_) is answer + assert is_output_type(GraphQLList(type_)) is answer + assert is_output_type(GraphQLNonNull(type_)) is answer + + def prohibits_nesting_nonnull_inside_nonnull(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + GraphQLNonNull(GraphQLNonNull(GraphQLInt)) + msg = str(exc_info.value) + assert msg == ( + 'Can only create NonNull of a Nullable GraphQLType but got: Int!.') + + def allows_a_thunk_for_union_member_types(): + union = GraphQLUnionType('ThunkUnion', lambda: [ObjectType]) + + types = union.types + assert len(types) == 1 + assert types[0] is ObjectType + + def does_not_mutate_passed_field_definitions(): + fields = { + 'field1': GraphQLField(GraphQLString), + 'field2': GraphQLField(GraphQLString, args={ + 'id': GraphQLArgument(GraphQLString)})} + + TestObject1 = GraphQLObjectType('Test1', fields) + TestObject2 = GraphQLObjectType('Test2', fields) + + assert TestObject1.fields == TestObject2.fields + assert fields == { + 'field1': GraphQLField(GraphQLString), + 'field2': GraphQLField(GraphQLString, args={ + 'id': GraphQLArgument(GraphQLString)})} + + input_fields = { + 'field1': GraphQLInputField(GraphQLString), + 'field2': GraphQLInputField(GraphQLString)} + + TestInputObject1 = GraphQLInputObjectType('Test1', input_fields) + TestInputObject2 = GraphQLInputObjectType('Test2', input_fields) + + 
assert TestInputObject1.fields == TestInputObject2.fields + assert input_fields == { + 'field1': GraphQLInputField(GraphQLString), + 'field2': GraphQLInputField(GraphQLString)} + + +def describe_field_config_must_be_a_dict(): + + def accepts_an_object_type_with_a_field_function(): + obj_type = GraphQLObjectType('SomeObject', lambda: { + 'f': GraphQLField(GraphQLString)}) + assert obj_type.fields['f'].type is GraphQLString + + def thunk_for_fields_of_object_type_is_resolved_only_once(): + def fields(): + nonlocal calls + calls += 1 + return {'f': GraphQLField(GraphQLString)} + calls = 0 + obj_type = GraphQLObjectType('SomeObject', fields) + assert 'f' in obj_type.fields + assert calls == 1 + assert 'f' in obj_type.fields + assert calls == 1 + + def rejects_an_object_type_field_with_undefined_config(): + undefined_field = cast(GraphQLField, None) + obj_type = GraphQLObjectType('SomeObject', {'f': undefined_field}) + with raises(TypeError) as exc_info: + if obj_type.fields: + pass + msg = str(exc_info.value) + assert msg == ( + 'SomeObject fields must be GraphQLField or output type objects.') + + def rejects_an_object_type_with_incorrectly_typed_fields(): + invalid_field = cast(GraphQLField, [GraphQLField(GraphQLString)]) + obj_type = GraphQLObjectType('SomeObject', {'f': invalid_field}) + with raises(TypeError) as exc_info: + if obj_type.fields: + pass + msg = str(exc_info.value) + assert msg == ( + 'SomeObject fields must be GraphQLField or output type objects.') + + def accepts_an_object_type_with_output_type_as_field(): + # this is a shortcut syntax for simple fields + obj_type = GraphQLObjectType('SomeObject', {'f': GraphQLString}) + field = obj_type.fields['f'] + assert isinstance(field, GraphQLField) + assert field.type is GraphQLString + + def rejects_an_object_type_field_function_that_returns_incorrect_type(): + obj_type = GraphQLObjectType('SomeObject', + lambda: [GraphQLField(GraphQLString)]) + with raises(TypeError) as exc_info: + if obj_type.fields: + pass + msg = str(exc_info.value) + assert msg == ( + 'SomeObject fields must be a dict with field names as keys' + ' or a function which returns such an object.') + + +def describe_field_args_must_be_a_dict(): + + def accepts_an_object_type_with_field_args(): + obj_type = GraphQLObjectType('SomeObject', { + 'goodField': GraphQLField(GraphQLString, args={ + 'goodArg': GraphQLArgument(GraphQLString)})}) + assert 'goodArg' in obj_type.fields['goodField'].args + + def rejects_an_object_type_with_incorrectly_typed_field_args(): + invalid_args = [{'bad_args': GraphQLArgument(GraphQLString)}] + invalid_args = cast(Dict[str, GraphQLArgument], invalid_args) + with raises(TypeError) as exc_info: + GraphQLObjectType('SomeObject', { + 'badField': GraphQLField(GraphQLString, args=invalid_args)}) + msg = str(exc_info.value) + assert msg == ( + 'Field args must be a dict with argument names as keys.') + + def does_not_accept_is_deprecated_as_argument(): + kwargs = dict(is_deprecated=True) + with raises(TypeError) as exc_info: + GraphQLObjectType('OldObject', { + 'field': GraphQLField(GraphQLString, **kwargs)}) + msg = str(exc_info.value) + assert "got an unexpected keyword argument 'is_deprecated'" in msg + + +def describe_object_interfaces_must_be_a_sequence(): + + def accepts_an_object_type_with_list_interfaces(): + obj_type = GraphQLObjectType( + 'SomeObject', interfaces=[InterfaceType], + fields={'f': GraphQLField(GraphQLString)}) + assert obj_type.interfaces == [InterfaceType] + + def 
accepts_object_type_with_interfaces_as_a_function_returning_a_list(): + obj_type = GraphQLObjectType( + 'SomeObject', interfaces=lambda: [InterfaceType], + fields={'f': GraphQLField(GraphQLString)}) + assert obj_type.interfaces == [InterfaceType] + + def thunk_for_interfaces_of_object_type_is_resolved_only_once(): + def interfaces(): + nonlocal calls + calls += 1 + return [InterfaceType] + calls = 0 + obj_type = GraphQLObjectType( + 'SomeObject', interfaces=interfaces, + fields={'f': GraphQLField(GraphQLString)}) + assert obj_type.interfaces == [InterfaceType] + assert calls == 1 + assert obj_type.interfaces == [InterfaceType] + assert calls == 1 + + def rejects_an_object_type_with_incorrectly_typed_interfaces(): + obj_type = GraphQLObjectType( + 'SomeObject', interfaces={}, + fields={'f': GraphQLField(GraphQLString)}) + with raises(TypeError) as exc_info: + if obj_type.interfaces: + pass + msg = str(exc_info.value) + assert msg == ( + 'SomeObject interfaces must be a list/tuple' + ' or a function which returns a list/tuple.') + + def rejects_object_type_with_incorrectly_typed_interfaces_as_a_function(): + obj_type = GraphQLObjectType( + 'SomeObject', interfaces=lambda: {}, + fields={'f': GraphQLField(GraphQLString)}) + with raises(TypeError) as exc_info: + if obj_type.interfaces: + pass + msg = str(exc_info.value) + assert msg == ( + 'SomeObject interfaces must be a list/tuple' + ' or a function which returns a list/tuple.') + + +def describe_type_system_object_fields_must_have_valid_resolve_values(): + + @fixture + def schema_with_object_with_field_resolver(resolve_value): + BadResolverType = GraphQLObjectType('BadResolver', { + 'bad_field': GraphQLField(GraphQLString, resolve=resolve_value)}) + return GraphQLSchema(GraphQLObjectType('Query', { + 'f': GraphQLField(BadResolverType)})) + + def accepts_a_lambda_as_an_object_field_resolver(): + schema_with_object_with_field_resolver(lambda _obj, _info: {}) + + def rejects_an_empty_object_field_resolver(): + with raises(TypeError) as exc_info: + schema_with_object_with_field_resolver({}) + msg = str(exc_info.value) + assert msg == ( + 'Field resolver must be a function if provided, but got: {}.') + + def rejects_a_constant_scalar_value_resolver(): + with raises(TypeError) as exc_info: + schema_with_object_with_field_resolver(0) + msg = str(exc_info.value) + assert msg == ( + 'Field resolver must be a function if provided, but got: 0.') + + +def describe_type_system_interface_types_must_be_resolvable(): + + def accepts_an_interface_type_defining_resolve_type(): + AnotherInterfaceType = GraphQLInterfaceType('AnotherInterface', { + 'f': GraphQLField(GraphQLString)}) + + schema = schema_with_field_type(GraphQLObjectType('SomeObject', { + 'f': GraphQLField(GraphQLString)}, [AnotherInterfaceType])) + + assert schema.query_type.fields[ + 'field'].type.interfaces[0] is AnotherInterfaceType + + def rejects_an_interface_type_with_an_incorrect_type_for_resolve_type(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + GraphQLInterfaceType('AnotherInterface', { + 'f': GraphQLField(GraphQLString)}, resolve_type={}) + msg = str(exc_info.value) + assert msg == ( + "AnotherInterface must provide 'resolve_type' as a function," + ' but got: {}.') + + +def describe_type_system_union_types_must_be_resolvable(): + + ObjectWithIsTypeOf = GraphQLObjectType('ObjectWithIsTypeOf', { + 'f': GraphQLField(GraphQLString)}) + + def accepts_a_union_type_defining_resolve_type(): + schema_with_field_type(GraphQLUnionType('SomeUnion', [ObjectType])) + + def 
accepts_a_union_of_object_types_defining_is_type_of(): + schema_with_field_type(GraphQLUnionType( + 'SomeUnion', [ObjectWithIsTypeOf])) + + def rejects_an_interface_type_with_an_incorrect_type_for_resolve_type(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + schema_with_field_type(GraphQLUnionType( + 'SomeUnion', [ObjectWithIsTypeOf], resolve_type={})) + msg = str(exc_info.value) + assert msg == ( + "SomeUnion must provide 'resolve_type' as a function," + ' but got: {}.') + + +def describe_type_system_scalar_types_must_be_serializable(): + + def accepts_a_scalar_type_defining_serialize(): + schema_with_field_type(GraphQLScalarType('SomeScalar', lambda: None)) + + def rejects_a_scalar_type_not_defining_serialize(): + with raises(TypeError) as exc_info: + # noinspection PyArgumentList + schema_with_field_type(GraphQLScalarType('SomeScalar')) + msg = str(exc_info.value) + assert "missing 1 required positional argument: 'serialize'" in msg + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + schema_with_field_type(GraphQLScalarType('SomeScalar', None)) + msg = str(exc_info.value) + assert msg == ( + "SomeScalar must provide 'serialize' function." + ' If this custom Scalar is also used as an input type,' + " ensure 'parse_value' and 'parse_literal' functions" + ' are also provided.') + + def rejects_a_scalar_type_defining_serialize_with_incorrect_type(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + schema_with_field_type(GraphQLScalarType('SomeScalar', {})) + msg = str(exc_info.value) + assert msg == ( + "SomeScalar must provide 'serialize' function." + ' If this custom Scalar is also used as an input type,' + " ensure 'parse_value' and 'parse_literal' functions" + ' are also provided.') + + def accepts_a_scalar_type_defining_parse_value_and_parse_literal(): + schema_with_field_type(GraphQLScalarType( + 'SomeScalar', serialize=lambda: None, + parse_value=lambda: None, parse_literal=lambda: None)) + + def rejects_a_scalar_type_defining_parse_value_but_not_parse_literal(): + with raises(TypeError) as exc_info: + schema_with_field_type(GraphQLScalarType( + 'SomeScalar', lambda: None, parse_value=lambda: None)) + msg = str(exc_info.value) + assert msg == ('SomeScalar must provide both' + " 'parse_value' and 'parse_literal' functions.") + + def rejects_a_scalar_type_defining_parse_literal_but_not_parse_value(): + with raises(TypeError) as exc_info: + schema_with_field_type(GraphQLScalarType( + 'SomeScalar', lambda: None, parse_literal=lambda: None)) + msg = str(exc_info.value) + assert msg == ('SomeScalar must provide both' + " 'parse_value' and 'parse_literal' functions.") + + def rejects_a_scalar_type_incorrectly_defining_parse_literal_and_value(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + schema_with_field_type(GraphQLScalarType( + 'SomeScalar', lambda: None, parse_value={}, parse_literal={})) + msg = str(exc_info.value) + assert msg == ('SomeScalar must provide both' + " 'parse_value' and 'parse_literal' functions.") + + +def describe_type_system_object_types_must_be_assertable(): + + def accepts_an_object_type_with_an_is_type_of_function(): + schema_with_field_type(GraphQLObjectType('AnotherObject', { + 'f': GraphQLField(GraphQLString)})) + + def rejects_an_object_type_with_an_incorrect_type_for_is_type_of(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + schema_with_field_type(GraphQLObjectType('AnotherObject', { + 'f': GraphQLField(GraphQLString)}, is_type_of={})) + msg = 
str(exc_info.value) + assert msg == ( + "AnotherObject must provide 'is_type_of' as a function," + ' but got: {}.') + + +def describe_union_types_must_be_list(): + + def accepts_a_union_type_with_list_types(): + schema_with_field_type(GraphQLUnionType('SomeUnion', [ObjectType])) + + def accepts_a_union_type_with_function_returning_a_list_of_types(): + schema_with_field_type(GraphQLUnionType( + 'SomeUnion', lambda: [ObjectType])) + + def rejects_a_union_type_without_types(): + with raises(TypeError) as exc_info: + # noinspection PyArgumentList + schema_with_field_type(GraphQLUnionType('SomeUnion')) + msg = str(exc_info.value) + assert "missing 1 required positional argument: 'types'" in msg + schema_with_field_type(GraphQLUnionType('SomeUnion', None)) + + def rejects_a_union_type_with_incorrectly_typed_types(): + with raises(TypeError) as exc_info: + schema_with_field_type(GraphQLUnionType( + 'SomeUnion', {'type': ObjectType})) + msg = str(exc_info.value) + assert msg == ( + 'SomeUnion types must be a list/tuple' + ' or a function which returns a list/tuple.') + + +def describe_type_system_input_objects_must_have_fields(): + + def accepts_an_input_object_type_with_fields(): + input_obj_type = GraphQLInputObjectType('SomeInputObject', { + 'f': GraphQLInputField(GraphQLString)}) + assert input_obj_type.fields['f'].type is GraphQLString + + def accepts_an_input_object_type_with_a_field_function(): + input_obj_type = GraphQLInputObjectType('SomeInputObject', lambda: { + 'f': GraphQLInputField(GraphQLString)}) + assert input_obj_type.fields['f'].type is GraphQLString + + def rejects_an_input_object_type_with_incorrect_fields(): + input_obj_type = GraphQLInputObjectType('SomeInputObject', []) + with raises(TypeError) as exc_info: + if input_obj_type.fields: + pass + msg = str(exc_info.value) + assert msg == ( + 'SomeInputObject fields must be a dict with field names as keys' + ' or a function which returns such an object.') + + def accepts_an_input_object_type_with_input_type_as_field(): + # this is a shortcut syntax for simple input fields + input_obj_type = GraphQLInputObjectType('SomeInputObject', { + 'f': GraphQLString}) + field = input_obj_type.fields['f'] + assert isinstance(field, GraphQLInputField) + assert field.type is GraphQLString + + def rejects_an_input_object_type_with_incorrect_fields_function(): + input_obj_type = GraphQLInputObjectType('SomeInputObject', lambda: []) + with raises(TypeError) as exc_info: + if input_obj_type.fields: + pass + msg = str(exc_info.value) + assert msg == ( + 'SomeInputObject fields must be a dict with field names as keys' + ' or a function which returns such an object.') + + +def describe_type_system_input_objects_fields_must_not_have_resolvers(): + + def rejects_an_input_object_type_with_resolvers(): + with raises(TypeError) as exc_info: + # noinspection PyArgumentList + GraphQLInputObjectType('SomeInputObject', { + 'f': GraphQLInputField(GraphQLString, resolve=lambda: 0)}) + msg = str(exc_info.value) + assert "got an unexpected keyword argument 'resolve'" in msg + input_obj_type = GraphQLInputObjectType('SomeInputObject', { + 'f': GraphQLField(GraphQLString, resolve=lambda: 0)}) + with raises(TypeError) as exc_info: + if input_obj_type.fields: + pass + msg = str(exc_info.value) + assert msg == ( + 'SomeInputObject fields must be GraphQLInputField' + ' or input type objects.') + + def rejects_an_input_object_type_with_resolver_constant(): + with raises(TypeError) as exc_info: + # noinspection PyArgumentList + GraphQLInputObjectType('SomeInputObject', 
{ + 'f': GraphQLInputField(GraphQLString, resolve={})}) + msg = str(exc_info.value) + assert "got an unexpected keyword argument 'resolve'" in msg + + +def describe_type_system_enum_types_must_be_well_defined(): + + def accepts_a_well_defined_enum_type_with_empty_value_definition(): + enum_type = GraphQLEnumType('SomeEnum', {'FOO': None, 'BAR': None}) + assert enum_type.values['FOO'].value is None + assert enum_type.values['BAR'].value is None + + def accepts_a_well_defined_enum_type_with_internal_value_definition(): + enum_type = GraphQLEnumType('SomeEnum', {'FOO': 10, 'BAR': 20}) + assert enum_type.values['FOO'].value == 10 + assert enum_type.values['BAR'].value == 20 + enum_type = GraphQLEnumType('SomeEnum', { + 'FOO': GraphQLEnumValue(10), + 'BAR': GraphQLEnumValue(20)}) + assert enum_type.values['FOO'].value == 10 + assert enum_type.values['BAR'].value == 20 + + def rejects_an_enum_type_with_incorrectly_typed_values(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + GraphQLEnumType('SomeEnum', [{'FOO': 10}]) # type: ignore + msg = str(exc_info.value) + assert msg == ( + 'SomeEnum values must be an Enum' + ' or a dict with value names as keys.') + + def does_not_allow_is_deprecated(): + with raises(TypeError) as exc_info: + # noinspection PyArgumentList + GraphQLEnumType('SomeEnum', { + 'FOO': GraphQLEnumValue(is_deprecated=True)}) + msg = str(exc_info.value) + assert "got an unexpected keyword argument 'is_deprecated'" in msg + + +def describe_type_system_list_must_accept_only_types(): + + types = [ + GraphQLString, ScalarType, ObjectType, + UnionType, InterfaceType, EnumType, InputObjectType, + GraphQLList(GraphQLString), GraphQLNonNull(GraphQLString)] + + not_types = [{}, dict, str, object, None] + + @mark.parametrize('type_', types) + def accepts_a_type_as_item_type_of_list(type_): + assert GraphQLList(type_) + + @mark.parametrize('type_', not_types) + def rejects_a_non_type_as_item_type_of_list(type_): + with raises(TypeError) as exc_info: + assert GraphQLList(type_) + msg = str(exc_info.value) + assert msg == ( + 'Can only create a wrapper for a GraphQLType,' + f' but got: {type_}.') + + +def describe_type_system_non_null_must_only_accept_non_nullable_types(): + + nullable_types = [ + GraphQLString, ScalarType, ObjectType, + UnionType, InterfaceType, EnumType, InputObjectType, + GraphQLList(GraphQLString), GraphQLList(GraphQLNonNull(GraphQLString))] + + not_nullable_types = [ + GraphQLNonNull(GraphQLString), {}, dict, str, object, None] + + @mark.parametrize('type_', nullable_types) + def accepts_a_type_as_nullable_type_of_non_null(type_): + assert GraphQLNonNull(type_) + + @mark.parametrize('type_', not_nullable_types) + def rejects_a_non_type_as_nullable_type_of_non_null(type_): + with raises(TypeError) as exc_info: + assert GraphQLNonNull(type_) + msg = str(exc_info.value) + assert msg == ( + 'Can only create NonNull of a Nullable GraphQLType' + f' but got: {type_}.') if isinstance(type_, GraphQLNonNull) else ( + 'Can only create a wrapper for a GraphQLType,' + f' but got: {type_}.') + + +def describe_type_system_a_schema_must_contain_uniquely_named_types(): + + def rejects_a_schema_which_redefines_a_built_in_type(): + FakeString = GraphQLScalarType('String', serialize=lambda: None) + + QueryType = GraphQLObjectType('Query', { + 'normal': GraphQLField(GraphQLString), + 'fake': GraphQLField(FakeString)}) + + with raises(TypeError) as exc_info: + GraphQLSchema(QueryType) + msg = str(exc_info.value) + assert msg == ( + 'Schema must contain unique named 
types' + f" but contains multiple types named 'String'.") + + def rejects_a_schema_which_defines_an_object_twice(): + A = GraphQLObjectType('SameName', {'f': GraphQLField(GraphQLString)}) + B = GraphQLObjectType('SameName', {'f': GraphQLField(GraphQLString)}) + + QueryType = GraphQLObjectType('Query', {'a': A, 'b': B}) + + with raises(TypeError) as exc_info: + GraphQLSchema(QueryType) + msg = str(exc_info.value) + assert msg == ( + 'Schema must contain unique named types' + f" but contains multiple types named 'SameName'.") + + def rejects_a_schema_with_same_named_objects_implementing_an_interface(): + AnotherInterface = GraphQLInterfaceType('AnotherInterface', { + 'f': GraphQLField(GraphQLString)}) + + FirstBadObject = GraphQLObjectType( + 'BadObject', {'f': GraphQLField(GraphQLString)}, + interfaces=[AnotherInterface]) + + SecondBadObject = GraphQLObjectType( + 'BadObject', {'f': GraphQLField(GraphQLString)}, + interfaces=[AnotherInterface]) + + QueryType = GraphQLObjectType('Query', { + 'iface': GraphQLField(AnotherInterface)}) + + with raises(TypeError) as exc_info: + GraphQLSchema(QueryType, types=[FirstBadObject, SecondBadObject]) + msg = str(exc_info.value) + assert msg == ( + 'Schema must contain unique named types' + f" but contains multiple types named 'BadObject'.") diff --git a/tests/type/test_enum.py b/tests/type/test_enum.py new file mode 100644 index 00000000..8b930773 --- /dev/null +++ b/tests/type/test_enum.py @@ -0,0 +1,251 @@ +from enum import Enum + +from graphql import graphql_sync +from graphql.type import ( + GraphQLArgument, GraphQLBoolean, GraphQLEnumType, GraphQLField, + GraphQLInt, GraphQLObjectType, GraphQLSchema, GraphQLString) +from graphql.utilities import introspection_from_schema + +ColorType = GraphQLEnumType('Color', values={ + 'RED': 0, + 'GREEN': 1, + 'BLUE': 2}) + + +class ColorTypeEnumValues(Enum): + RED = 0 + GREEN = 1 + BLUE = 2 + + +class Complex1: + # noinspection PyMethodMayBeStatic + def some_random_function(self): + return {} + + +class Complex2: + some_random_value = 123 + + def __repr__(self): + return 'Complex2' + + +complex1 = Complex1() +complex2 = Complex2() + +ComplexEnum = GraphQLEnumType('Complex', { + 'ONE': complex1, + 'TWO': complex2}) + +ColorType2 = GraphQLEnumType('Color', ColorTypeEnumValues) + +QueryType = GraphQLObjectType('Query', { + 'colorEnum': GraphQLField(ColorType, args={ + 'fromEnum': GraphQLArgument(ColorType), + 'fromInt': GraphQLArgument(GraphQLInt), + 'fromString': GraphQLArgument(GraphQLString)}, + resolve=lambda value, info, **args: + args.get('fromInt') or + args.get('fromString') or args.get('fromEnum')), + 'colorInt': GraphQLField(GraphQLInt, args={ + 'fromEnum': GraphQLArgument(ColorType), + 'fromInt': GraphQLArgument(GraphQLInt)}, + resolve=lambda value, info, **args: + args.get('fromInt') or args.get('fromEnum')), + 'complexEnum': GraphQLField(ComplexEnum, args={ + # Note: default_value is provided an *internal* representation for + # Enums, rather than the string name. + 'fromEnum': GraphQLArgument(ComplexEnum, default_value=complex1), + 'provideGoodValue': GraphQLArgument(GraphQLBoolean), + 'provideBadValue': GraphQLArgument(GraphQLBoolean)}, + resolve=lambda value, info, **args: + # Note: this is one of the references of the internal values + # which ComplexEnum allows. + complex2 if args.get('provideGoodValue') + # Note: similar object, but not the same *reference* as + # complex2 above. Enum internal values require object equality. 
+ else Complex2() if args.get('provideBadValue') + else args.get('fromEnum'))}) + +MutationType = GraphQLObjectType('Mutation', { + 'favoriteEnum': GraphQLField(ColorType, args={ + 'color': GraphQLArgument(ColorType)}, + resolve=lambda value, info, color=None: color)}) + +SubscriptionType = GraphQLObjectType('Subscription', { + 'subscribeToEnum': GraphQLField(ColorType, args={ + 'color': GraphQLArgument(ColorType)}, + resolve=lambda value, info, color=None: color)}) + +schema = GraphQLSchema( + query=QueryType, mutation=MutationType, subscription=SubscriptionType) + + +def execute_query(source, variable_values=None): + return graphql_sync(schema, source, variable_values=variable_values) + + +def describe_type_system_enum_values(): + + def can_use_python_enums_instead_of_dicts(): + assert ColorType2.values == ColorType.values + keys = [key for key in ColorType.values] + keys2 = [key for key in ColorType2.values] + assert keys2 == keys + values = [value.value for value in ColorType.values.values()] + values2 = [value.value for value in ColorType2.values.values()] + assert values2 == values + + def accepts_enum_literals_as_input(): + result = execute_query('{ colorInt(fromEnum: GREEN) }') + + assert result == ({'colorInt': 1}, None) + + def enum_may_be_output_type(): + result = execute_query('{ colorEnum(fromInt: 1) }') + + assert result == ({'colorEnum': 'GREEN'}, None) + + def enum_may_be_both_input_and_output_type(): + result = execute_query('{ colorEnum(fromEnum: GREEN) }') + + assert result == ({'colorEnum': 'GREEN'}, None) + + def does_not_accept_string_literals(): + result = execute_query('{ colorEnum(fromEnum: "GREEN") }') + + assert result == (None, [{ + 'message': 'Expected type Color, found "GREEN";' + ' Did you mean the enum value GREEN?', + 'locations': [(1, 23)]}]) + + def does_not_accept_values_not_in_the_enum(): + result = execute_query('{ colorEnum(fromEnum: GREENISH) }') + + assert result == (None, [{ + 'message': 'Expected type Color, found GREENISH;' + ' Did you mean the enum value GREEN?', + 'locations': [(1, 23)]}]) + + def does_not_accept_values_with_incorrect_casing(): + result = execute_query('{ colorEnum(fromEnum: green) }') + + assert result == (None, [{ + 'message': 'Expected type Color, found green;' + ' Did you mean the enum value GREEN?', + 'locations': [(1, 23)]}]) + + def does_not_accept_incorrect_internal_value(): + result = execute_query('{ colorEnum(fromString: "GREEN") }') + + assert result == ({'colorEnum': None}, [{ + 'message': "Expected a value of type 'Color'" + " but received: 'GREEN'", + 'locations': [(1, 3)], 'path': ['colorEnum']}]) + + def does_not_accept_internal_value_in_place_of_enum_literal(): + result = execute_query('{ colorEnum(fromEnum: 1) }') + + assert result == (None, [{ + 'message': "Expected type Color, found 1.", + 'locations': [(1, 23)]}]) + + def does_not_accept_internal_value_in_place_of_int(): + result = execute_query('{ colorEnum(fromInt: GREEN) }') + + assert result == (None, [{ + 'message': "Expected type Int, found GREEN.", + 'locations': [(1, 22)]}]) + + def accepts_json_string_as_enum_variable(): + doc = 'query ($color: Color!) { colorEnum(fromEnum: $color) }' + result = execute_query(doc, {'color': 'BLUE'}) + + assert result == ({'colorEnum': 'BLUE'}, None) + + def accepts_enum_literals_as_input_arguments_to_mutations(): + doc = 'mutation ($color: Color!) 
{ favoriteEnum(color: $color) }' + result = execute_query(doc, {'color': 'GREEN'}) + + assert result == ({'favoriteEnum': 'GREEN'}, None) + + def accepts_enum_literals_as_input_arguments_to_subscriptions(): + doc = ('subscription ($color: Color!) {' + ' subscribeToEnum(color: $color) }') + result = execute_query(doc, {'color': 'GREEN'}) + + assert result == ({'subscribeToEnum': 'GREEN'}, None) + + def does_not_accept_internal_value_as_enum_variable(): + doc = 'query ($color: Color!) { colorEnum(fromEnum: $color) }' + result = execute_query(doc, {'color': 2}) + + assert result == (None, [{ + 'message': "Variable '$color' got invalid value 2;" + ' Expected type Color.', + 'locations': [(1, 8)]}]) + + def does_not_accept_string_variables_as_enum_input(): + doc = 'query ($color: String!) { colorEnum(fromEnum: $color) }' + result = execute_query(doc, {'color': 'BLUE'}) + + assert result == (None, [{ + 'message': "Variable '$color' of type 'String!'" + " used in position expecting type 'Color'.", + 'locations': [(1, 8), (1, 47)]}]) + + def does_not_accept_internal_value_variable_as_enum_input(): + doc = 'query ($color: Int!) { colorEnum(fromEnum: $color) }' + result = execute_query(doc, {'color': 2}) + + assert result == (None, [{ + 'message': "Variable '$color' of type 'Int!'" + " used in position expecting type 'Color'.", + 'locations': [(1, 8), (1, 44)]}]) + + def enum_value_may_have_an_internal_value_of_0(): + result = execute_query(""" + { + colorEnum(fromEnum: RED) + colorInt(fromEnum: RED) + } + """) + + assert result == ({'colorEnum': 'RED', 'colorInt': 0}, None) + + def enum_inputs_may_be_nullable(): + result = execute_query(""" + { + colorEnum + colorInt + } + """) + + assert result == ({'colorEnum': None, 'colorInt': None}, None) + + def presents_a_values_property_for_complex_enums(): + values = ComplexEnum.values + assert len(values) == 2 + assert isinstance(values, dict) + assert values['ONE'].value is complex1 + assert values['TWO'].value is complex2 + + def may_be_internally_represented_with_complex_values(): + result = execute_query(""" + { + first: complexEnum + second: complexEnum(fromEnum: TWO) + good: complexEnum(provideGoodValue: true) + bad: complexEnum(provideBadValue: true) + } + """) + + assert result == ({ + 'first': 'ONE', 'second': 'TWO', 'good': 'TWO', 'bad': None}, + [{'message': + "Expected a value of type 'Complex' but received: Complex2", + 'locations': [(6, 15)], 'path': ['bad']}]) + + def can_be_introspected_without_error(): + introspection_from_schema(schema) diff --git a/tests/type/test_introspection.py b/tests/type/test_introspection.py new file mode 100644 index 00000000..61fe5624 --- /dev/null +++ b/tests/type/test_introspection.py @@ -0,0 +1,1175 @@ +from graphql import graphql_sync +from graphql.type import ( + GraphQLArgument, GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLInputField, GraphQLInputObjectType, GraphQLList, + GraphQLObjectType, GraphQLSchema, GraphQLString) +from graphql.utilities import get_introspection_query +from graphql.validation.rules.provided_required_arguments import ( + missing_field_arg_message) + + +def describe_introspection(): + + def executes_an_introspection_query(): + EmptySchema = GraphQLSchema(GraphQLObjectType('QueryRoot', { + 'onlyField': GraphQLField(GraphQLString)})) + + query = get_introspection_query(descriptions=False) + result = graphql_sync(EmptySchema, query) + assert result.errors is None + assert result.data == { + '__schema': { + 'mutationType': None, + 'subscriptionType': None, + 'queryType': { + 
'name': 'QueryRoot' + }, + 'types': [{ + 'kind': 'OBJECT', + 'name': 'QueryRoot', + 'fields': [{ + 'name': 'onlyField', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None, + }, + 'isDeprecated': False, + 'deprecationReason': None + }], + 'inputFields': None, + 'interfaces': [], + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'SCALAR', + 'name': 'String', + 'fields': None, + 'inputFields': None, + 'interfaces': None, + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'OBJECT', + 'name': '__Schema', + 'fields': [{ + 'name': 'types', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + } + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'queryType', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'mutationType', + 'args': [], + 'type': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'subscriptionType', + 'args': [], + 'type': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'directives', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__Directive', + 'ofType': None + } + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }], + 'inputFields': None, + 'interfaces': [], + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'OBJECT', + 'name': '__Type', + 'fields': [{ + 'name': 'kind', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'ENUM', + 'name': '__TypeKind', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'name', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'description', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'fields', + 'args': [{ + 'name': 'includeDeprecated', + 'type': { + 'kind': 'SCALAR', + 'name': 'Boolean', + 'ofType': None + }, + 'defaultValue': 'false' + }], + 'type': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__Field', + 'ofType': None + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'interfaces', + 'args': [], + 'type': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'possibleTypes', + 'args': [], + 'type': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'enumValues', + 'args': [{ 
+ 'name': 'includeDeprecated', + 'type': { + 'kind': 'SCALAR', + 'name': 'Boolean', + 'ofType': None + }, + 'defaultValue': 'false' + }], + 'type': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__EnumValue', + 'ofType': None + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'inputFields', + 'args': [], + 'type': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__InputValue', + 'ofType': None + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'ofType', + 'args': [], + 'type': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }], + 'inputFields': None, + 'interfaces': [], + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'ENUM', + 'name': '__TypeKind', + 'fields': None, + 'inputFields': None, + 'interfaces': None, + 'enumValues': [{ + 'name': 'SCALAR', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'OBJECT', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'INTERFACE', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'UNION', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'ENUM', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'INPUT_OBJECT', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'LIST', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'NON_NULL', + 'isDeprecated': False, + 'deprecationReason': None + }], + 'possibleTypes': None + }, { + 'kind': 'SCALAR', + 'name': 'Boolean', + 'fields': None, + 'inputFields': None, + 'interfaces': None, + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'OBJECT', + 'name': '__Field', + 'fields': [{ + 'name': 'name', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'description', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'args', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__InputValue', + 'ofType': None + } + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'type', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'isDeprecated', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'Boolean', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'deprecationReason', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }], + 'inputFields': None, + 'interfaces': [], + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'OBJECT', + 'name': '__InputValue', + 'fields': [{ + 'name': 'name', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 
'ofType': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'description', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'type', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__Type', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'defaultValue', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }], + 'inputFields': None, + 'interfaces': [], + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'OBJECT', + 'name': '__EnumValue', + 'fields': [{ + 'name': 'name', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'description', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'isDeprecated', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'Boolean', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'deprecationReason', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }], + 'inputFields': None, + 'interfaces': [], + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'OBJECT', + 'name': '__Directive', + 'fields': [{ + 'name': 'name', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'description', + 'args': [], + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'locations', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'ENUM', + 'name': '__DirectiveLocation', + 'ofType': None + } + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'args', + 'args': [], + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'OBJECT', + 'name': '__InputValue', + 'ofType': None + } + } + } + }, + 'isDeprecated': False, + 'deprecationReason': None + }], + 'inputFields': None, + 'interfaces': [], + 'enumValues': None, + 'possibleTypes': None + }, { + 'kind': 'ENUM', + 'name': '__DirectiveLocation', + 'fields': None, + 'inputFields': None, + 'interfaces': None, + 'enumValues': [{ + 'name': 'QUERY', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'MUTATION', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'SUBSCRIPTION', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'FIELD', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'FRAGMENT_DEFINITION', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 
'name': 'FRAGMENT_SPREAD', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'INLINE_FRAGMENT', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'SCHEMA', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'SCALAR', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'OBJECT', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'FIELD_DEFINITION', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'ARGUMENT_DEFINITION', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'INTERFACE', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'UNION', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'ENUM', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'ENUM_VALUE', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'INPUT_OBJECT', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'INPUT_FIELD_DEFINITION', + 'isDeprecated': False, + 'deprecationReason': None + }], + 'possibleTypes': None + }], + 'directives': [{ + 'name': 'include', + 'locations': [ + 'FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT'], + 'args': [{ + 'defaultValue': None, + 'name': 'if', + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'Boolean', + 'ofType': None + } + } + }] + }, { + 'name': 'skip', + 'locations': [ + 'FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT'], + 'args': [{ + 'defaultValue': None, + 'name': 'if', + 'type': { + 'kind': 'NON_NULL', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'Boolean', + 'ofType': None + } + } + }] + }, { + 'name': 'deprecated', + 'locations': ['FIELD_DEFINITION', 'ENUM_VALUE'], + 'args': [{ + 'defaultValue': '"No longer supported"', + 'name': 'reason', + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + } + }] + }] + } + } + + def introspects_on_input_object(): + TestInputObject = GraphQLInputObjectType('TestInputObject', { + 'a': GraphQLInputField(GraphQLString, + default_value='tes\t de\fault'), + 'b': GraphQLInputField(GraphQLList(GraphQLString)), + 'c': GraphQLInputField(GraphQLString, default_value=None)}) + + TestType = GraphQLObjectType('TestType', { + 'field': GraphQLField(GraphQLString, args={ + 'complex': GraphQLArgument(TestInputObject)}, + resolve=lambda obj, info, **args: repr(args.get('complex')))}) + + schema = GraphQLSchema(TestType) + request = """ + { + __type(name: "TestInputObject") { + kind + name + inputFields { + name + type { ...TypeRef } + defaultValue + } + } + } + + fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + """ + + assert graphql_sync(schema, request) == ({ + '__type': { + 'kind': 'INPUT_OBJECT', + 'name': 'TestInputObject', + 'inputFields': [{ + 'name': 'a', + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'defaultValue': '"tes\\t de\\fault"' + }, { + 'name': 'b', + 'type': { + 'kind': 'LIST', + 'name': None, + 'ofType': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + }, + 'defaultValue': None, + }, { + 'name': 'c', + 'type': { + 'kind': 'SCALAR', + 'name': 'String', + 'ofType': None + }, + 'defaultValue': 'null' + }] + } + }, None) + + def supports_the_type_root_field(): + TestType = GraphQLObjectType('TestType', { + 'testField': GraphQLField(GraphQLString)}) + + schema = GraphQLSchema(TestType) + request = """ + { + 
__type(name: "TestType") { + name + } + } + """ + + assert graphql_sync(schema, request) == ({ + '__type': { + 'name': 'TestType', + } + }, None) + + def identifies_deprecated_fields(): + TestType = GraphQLObjectType('TestType', { + 'nonDeprecated': GraphQLField(GraphQLString), + 'deprecated': GraphQLField( + GraphQLString, deprecation_reason='Removed in 1.0')}) + + schema = GraphQLSchema(TestType) + request = """ + { + __type(name: "TestType") { + name + fields(includeDeprecated: true) { + name + isDeprecated, + deprecationReason + } + } + } + """ + + assert graphql_sync(schema, request) == ({ + '__type': { + 'name': 'TestType', + 'fields': [{ + 'name': 'nonDeprecated', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'deprecated', + 'isDeprecated': True, + 'deprecationReason': 'Removed in 1.0' + }] + } + }, None) + + def respects_the_include_deprecated_parameter_for_fields(): + TestType = GraphQLObjectType('TestType', { + 'nonDeprecated': GraphQLField(GraphQLString), + 'deprecated': GraphQLField( + GraphQLString, deprecation_reason='Removed in 1.0')}) + + schema = GraphQLSchema(TestType) + request = """ + { + __type(name: "TestType") { + name + trueFields: fields(includeDeprecated: true) { + name + } + falseFields: fields(includeDeprecated: false) { + name + } + omittedFields: fields { + name + } + } + } + """ + + assert graphql_sync(schema, request) == ({ + '__type': { + 'name': 'TestType', + 'trueFields': [{ + 'name': 'nonDeprecated', + }, { + 'name': 'deprecated', + }], + 'falseFields': [{ + 'name': 'nonDeprecated', + }], + 'omittedFields': [{ + 'name': 'nonDeprecated', + }] + } + }, None) + + def identifies_deprecated_enum_values(): + TestEnum = GraphQLEnumType('TestEnum', { + 'NONDEPRECATED': GraphQLEnumValue(0), + 'DEPRECATED': GraphQLEnumValue( + 1, deprecation_reason='Removed in 1.0'), + 'ALSONONDEPRECATED': GraphQLEnumValue(2)}) + + TestType = GraphQLObjectType('TestType', { + 'testEnum': GraphQLField(TestEnum)}) + + schema = GraphQLSchema(TestType) + request = """ + { + __type(name: "TestEnum") { + name + enumValues(includeDeprecated: true) { + name + isDeprecated, + deprecationReason + } + } + } + """ + + assert graphql_sync(schema, request) == ({ + '__type': { + 'name': 'TestEnum', + 'enumValues': [{ + 'name': 'NONDEPRECATED', + 'isDeprecated': False, + 'deprecationReason': None + }, { + 'name': 'DEPRECATED', + 'isDeprecated': True, + 'deprecationReason': 'Removed in 1.0' + }, { + 'name': 'ALSONONDEPRECATED', + 'isDeprecated': False, + 'deprecationReason': None + }] + } + }, None) + + def respects_the_include_deprecated_parameter_for_enum_values(): + TestEnum = GraphQLEnumType('TestEnum', { + 'NONDEPRECATED': GraphQLEnumValue(0), + 'DEPRECATED': GraphQLEnumValue( + 1, deprecation_reason='Removed in 1.0'), + 'ALSONONDEPRECATED': GraphQLEnumValue(2)}) + + TestType = GraphQLObjectType('TestType', { + 'testEnum': GraphQLField(TestEnum)}) + + schema = GraphQLSchema(TestType) + request = """ + { + __type(name: "TestEnum") { + name + trueValues: enumValues(includeDeprecated: true) { + name + } + falseValues: enumValues(includeDeprecated: false) { + name + } + omittedValues: enumValues { + name + } + } + } + """ + + assert graphql_sync(schema, request) == ({ + '__type': { + 'name': 'TestEnum', + 'trueValues': [{ + 'name': 'NONDEPRECATED' + }, { + 'name': 'DEPRECATED' + }, { + 'name': 'ALSONONDEPRECATED' + }], + 'falseValues': [{ + 'name': 'NONDEPRECATED' + }, { + 'name': 'ALSONONDEPRECATED' + }], + 'omittedValues': [{ + 'name': 'NONDEPRECATED' + }, { + 'name': 
'ALSONONDEPRECATED' + }] + } + }, None) + + def fails_as_expected_on_the_type_root_field_without_an_arg(): + TestType = GraphQLObjectType('TestType', { + 'testField': GraphQLField(GraphQLString)}) + + schema = GraphQLSchema(TestType) + request = """ + { + __type { + name + } + } + """ + assert graphql_sync(schema, request) == (None, [{ + 'message': missing_field_arg_message( + '__type', 'name', 'String!'), 'locations': [(3, 15)]}]) + + def exposes_descriptions_on_types_and_fields(): + QueryRoot = GraphQLObjectType('QueryRoot', { + 'onlyField': GraphQLField(GraphQLString)}) + + schema = GraphQLSchema(QueryRoot) + + request = """ + { + schemaType: __type(name: "__Schema") { + name, + description, + fields { + name, + description + } + } + } + """ + + assert graphql_sync(schema, request) == ({ + 'schemaType': { + 'name': '__Schema', + 'description': + 'A GraphQL Schema defines the capabilities of a' + ' GraphQL server. It exposes all available types and' + ' directives on the server, as well as the entry points' + ' for query, mutation, and subscription operations.', + 'fields': [{ + 'name': 'types', + 'description': + 'A list of all types supported by this server.' + }, { + 'name': 'queryType', + 'description': + 'The type that query operations will be rooted at.' + }, { + 'name': 'mutationType', + 'description': + 'If this server supports mutation, the type that' + ' mutation operations will be rooted at.' + }, { + 'name': 'subscriptionType', + 'description': + 'If this server support subscription, the type' + ' that subscription operations will be rooted at.' + }, { + 'name': 'directives', + 'description': + 'A list of all directives supported by this server.' + }] + } + }, None) + + def exposes_descriptions_on_enums(): + QueryRoot = GraphQLObjectType('QueryRoot', { + 'onlyField': GraphQLField(GraphQLString)}) + + schema = GraphQLSchema(QueryRoot) + request = """ + { + typeKindType: __type(name: "__TypeKind") { + name, + description, + enumValues { + name, + description + } + } + } + """ + + assert graphql_sync(schema, request) == ({ + 'typeKindType': { + 'name': '__TypeKind', + 'description': + 'An enum describing what kind of type' + ' a given `__Type` is.', + 'enumValues': [{ + 'description': 'Indicates this type is a scalar.', + 'name': 'SCALAR' + }, { + 'description': + 'Indicates this type is an object.' + + ' `fields` and `interfaces` are valid fields.', + 'name': 'OBJECT' + }, { + 'description': + 'Indicates this type is an interface.' + ' `fields` and `possibleTypes` are valid fields.', + 'name': 'INTERFACE' + }, { + 'description': + 'Indicates this type is a union.' + ' `possibleTypes` is a valid field.', + 'name': 'UNION' + }, { + 'description': + 'Indicates this type is an enum.' + ' `enumValues` is a valid field.', + 'name': 'ENUM' + }, { + 'description': + 'Indicates this type is an input object.' + ' `inputFields` is a valid field.', + 'name': 'INPUT_OBJECT' + }, { + 'description': + 'Indicates this type is a list.' + ' `ofType` is a valid field.', + 'name': 'LIST' + }, { + 'description': + 'Indicates this type is a non-null.' 
+ ' `ofType` is a valid field.', + 'name': 'NON_NULL' + }] + } + }, None) + + def executes_introspection_query_without_calling_global_field_resolver(): + query_root = GraphQLObjectType('QueryRoot', { + 'onlyField': GraphQLField(GraphQLString)}) + + schema = GraphQLSchema(query_root) + source = get_introspection_query() + + called_for_fields = set() + + def field_resolver(value, info): + called_for_fields.add( + f'{info.parent_type.name}::{info.field_name}') + return value + + graphql_sync(schema, source, field_resolver=field_resolver) + assert not called_for_fields diff --git a/tests/type/test_predicate.py b/tests/type/test_predicate.py new file mode 100644 index 00000000..1180f48f --- /dev/null +++ b/tests/type/test_predicate.py @@ -0,0 +1,372 @@ +from pytest import raises + +from graphql.type import ( + GraphQLEnumType, GraphQLInputObjectType, GraphQLInterfaceType, + GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, + GraphQLString, GraphQLUnionType, + assert_abstract_type, assert_composite_type, assert_enum_type, + assert_input_object_type, assert_input_type, assert_interface_type, + assert_leaf_type, assert_list_type, assert_named_type, + assert_non_null_type, assert_nullable_type, assert_object_type, + assert_output_type, assert_scalar_type, assert_type, assert_union_type, + assert_wrapping_type, get_named_type, get_nullable_type, is_abstract_type, + is_composite_type, is_enum_type, is_input_object_type, is_input_type, + is_interface_type, is_leaf_type, is_list_type, is_named_type, + is_non_null_type, is_nullable_type, is_object_type, is_output_type, + is_scalar_type, is_type, is_union_type, is_wrapping_type) + +ObjectType = GraphQLObjectType('Object', {}) +InterfaceType = GraphQLInterfaceType('Interface') +UnionType = GraphQLUnionType('Union', types=[ObjectType]) +EnumType = GraphQLEnumType('Enum', values={'foo': {}}) +InputObjectType = GraphQLInputObjectType('InputObject', {}) +ScalarType = GraphQLScalarType( + 'Scalar', + serialize=lambda: {}, parse_value=lambda: {}, parse_literal=lambda: {}) + + +def describe_type_predicates(): + + def describe_is_type(): + + def returns_true_for_unwrapped_types(): + assert is_type(GraphQLString) is True + assert_type(GraphQLString) + assert is_type(ObjectType) is True + assert_type(ObjectType) + + def returns_true_for_wrapped_types(): + assert is_type(GraphQLNonNull(GraphQLString)) is True + assert_type(GraphQLNonNull(GraphQLString)) + + def returns_false_for_type_classes_rather_than_instance(): + assert is_type(GraphQLObjectType) is False + with raises(TypeError): + assert_type(GraphQLObjectType) + + def returns_false_for_random_garbage(): + assert is_type({'what': 'is this'}) is False + with raises(TypeError): + assert_type({'what': 'is this'}) + + def describe_is_scalar_type(): + + def returns_true_for_spec_defined_scalar(): + assert is_scalar_type(GraphQLString) is True + assert_scalar_type(GraphQLString) + + def returns_true_for_custom_scalar(): + assert is_scalar_type(ScalarType) is True + assert_scalar_type(ScalarType) + + def returns_false_for_non_scalar(): + assert is_scalar_type(EnumType) is False + with raises(TypeError): + assert_scalar_type(EnumType) + + def describe_is_object_type(): + + def returns_true_for_object_type(): + assert is_object_type(ObjectType) is True + assert_object_type(ObjectType) + + def returns_false_for_wrapped_object_type(): + assert is_object_type(GraphQLList(ObjectType)) is False + with raises(TypeError): + assert_object_type(GraphQLList(ObjectType)) + + def 
returns_false_for_non_object_type(): + assert is_scalar_type(InterfaceType) is False + with raises(TypeError): + assert_scalar_type(InterfaceType) + + def describe_is_interface_type(): + + def returns_true_for_interface_type(): + assert is_interface_type(InterfaceType) is True + assert_interface_type(InterfaceType) + + def returns_false_for_wrapped_interface_type(): + assert is_interface_type(GraphQLList(InterfaceType)) is False + with raises(TypeError): + assert_interface_type(GraphQLList(InterfaceType)) + + def returns_false_for_non_interface_type(): + assert is_interface_type(ObjectType) is False + with raises(TypeError): + assert_interface_type(ObjectType) + + def describe_is_union_type(): + + def returns_true_for_union_type(): + assert is_union_type(UnionType) is True + assert_union_type(UnionType) + + def returns_false_for_wrapped_union_type(): + assert is_union_type(GraphQLList(UnionType)) is False + with raises(TypeError): + assert_union_type(GraphQLList(UnionType)) + + def returns_false_for_non_union_type(): + assert is_union_type(ObjectType) is False + with raises(TypeError): + assert_union_type(ObjectType) + + def describe_is_enum_type(): + + def returns_true_for_enum_type(): + assert is_enum_type(EnumType) is True + assert_enum_type(EnumType) + + def returns_false_for_wrapped_enum_type(): + assert is_enum_type(GraphQLList(EnumType)) is False + with raises(TypeError): + assert_enum_type(GraphQLList(EnumType)) + + def returns_false_for_non_enum_type(): + assert is_enum_type(ScalarType) is False + with raises(TypeError): + assert_enum_type(ScalarType) + + def describe_is_input_object_type(): + + def returns_true_for_input_object_type(): + assert is_input_object_type(InputObjectType) is True + assert_input_object_type(InputObjectType) + + def returns_false_for_wrapped_input_object_type(): + assert is_input_object_type(GraphQLList(InputObjectType)) is False + with raises(TypeError): + assert_input_object_type(GraphQLList(InputObjectType)) + + def returns_false_for_non_input_object_type(): + assert is_input_object_type(ObjectType) is False + with raises(TypeError): + assert_input_object_type(ObjectType) + + def describe_is_list_type(): + + def returns_true_for_a_list_wrapped_type(): + assert is_list_type(GraphQLList(ObjectType)) is True + assert_list_type(GraphQLList(ObjectType)) + + def returns_false_for_an_unwrapped_type(): + assert is_list_type(ObjectType) is False + with raises(TypeError): + assert_list_type(ObjectType) + + def returns_true_for_a_non_list_wrapped_type(): + assert is_list_type( + GraphQLNonNull(GraphQLList(ObjectType))) is False + with raises(TypeError): + assert_list_type(GraphQLNonNull(GraphQLList(ObjectType))) + + def describe_is_non_null_type(): + + def returns_true_for_a_non_null_wrapped_type(): + assert is_non_null_type(GraphQLNonNull(ObjectType)) is True + assert_non_null_type(GraphQLNonNull(ObjectType)) + + def returns_false_for_an_unwrapped_type(): + assert is_non_null_type(ObjectType) is False + with raises(TypeError): + assert_non_null_type(ObjectType) + + def returns_true_for_a_not_non_null_wrapped_type(): + assert is_non_null_type( + GraphQLList(GraphQLNonNull(ObjectType))) is False + with raises(TypeError): + assert_non_null_type(GraphQLList(GraphQLNonNull(ObjectType))) + + def describe_is_input_type(): + + def returns_true_for_an_input_type(): + assert is_input_type(InputObjectType) is True + assert_input_type(InputObjectType) + + def returns_true_for_a_wrapped_input_type(): + assert is_input_type(GraphQLList(InputObjectType)) is True + 
assert_input_type(GraphQLList(InputObjectType)) + assert is_input_type(GraphQLNonNull(InputObjectType)) is True + assert_input_type(GraphQLNonNull(InputObjectType)) + + def returns_false_for_an_output_type(): + assert is_input_type(ObjectType) is False + with raises(TypeError): + assert_input_type(ObjectType) + + def returns_false_for_a_wrapped_output_type(): + assert is_input_type(GraphQLList(ObjectType)) is False + with raises(TypeError): + assert_input_type(GraphQLList(ObjectType)) + assert is_input_type(GraphQLNonNull(ObjectType)) is False + with raises(TypeError): + assert_input_type(GraphQLNonNull(ObjectType)) + + def describe_is_output_type(): + + def returns_true_for_an_output_type(): + assert is_output_type(ObjectType) is True + assert_output_type(ObjectType) + + def returns_true_for_a_wrapped_output_type(): + assert is_output_type(GraphQLList(ObjectType)) is True + assert_output_type(GraphQLList(ObjectType)) + assert is_output_type(GraphQLNonNull(ObjectType)) is True + assert_output_type(GraphQLNonNull(ObjectType)) + + def returns_false_for_an_input_type(): + assert is_output_type(InputObjectType) is False + with raises(TypeError): + assert_output_type(InputObjectType) + + def returns_false_for_a_wrapped_input_type(): + assert is_output_type(GraphQLList(InputObjectType)) is False + with raises(TypeError): + assert_output_type(GraphQLList(InputObjectType)) + assert is_output_type(GraphQLNonNull(InputObjectType)) is False + with raises(TypeError): + assert_output_type(GraphQLNonNull(InputObjectType)) + + def describe_is_leaf_type(): + + def returns_true_for_scalar_and_enum_types(): + assert is_leaf_type(ScalarType) is True + assert_leaf_type(ScalarType) + assert is_leaf_type(EnumType) is True + assert_leaf_type(EnumType) + + def returns_false_for_wrapped_leaf_type(): + assert is_leaf_type(GraphQLList(ScalarType)) is False + with raises(TypeError): + assert_leaf_type(GraphQLList(ScalarType)) + + def returns_false_for_non_leaf_type(): + assert is_leaf_type(ObjectType) is False + with raises(TypeError): + assert_leaf_type(ObjectType) + + def returns_false_for_wrapped_non_leaf_type(): + assert is_leaf_type(GraphQLList(ObjectType)) is False + with raises(TypeError): + assert_leaf_type(GraphQLList(ObjectType)) + + def describe_is_composite_type(): + + def returns_true_for_object_interface_and_union_types(): + assert is_composite_type(ObjectType) is True + assert_composite_type(ObjectType) + assert is_composite_type(InterfaceType) is True + assert_composite_type(InterfaceType) + assert is_composite_type(UnionType) is True + assert_composite_type(UnionType) + + def returns_false_for_wrapped_composite_type(): + assert is_composite_type(GraphQLList(ObjectType)) is False + with raises(TypeError): + assert_composite_type(GraphQLList(ObjectType)) + + def returns_false_for_non_composite_type(): + assert is_composite_type(InputObjectType) is False + with raises(TypeError): + assert_composite_type(InputObjectType) + + def returns_false_for_wrapped_non_composite_type(): + assert is_composite_type(GraphQLList(InputObjectType)) is False + with raises(TypeError): + assert_composite_type(GraphQLList(InputObjectType)) + + def describe_is_abstract_type(): + + def returns_true_for_interface_and_union_types(): + assert is_abstract_type(InterfaceType) is True + assert_abstract_type(InterfaceType) + assert is_abstract_type(UnionType) is True + assert_abstract_type(UnionType) + + def returns_false_for_wrapped_abstract_type(): + assert is_abstract_type(GraphQLList(InterfaceType)) is False + with 
raises(TypeError): + assert_abstract_type(GraphQLList(InterfaceType)) + + def returns_false_for_non_abstract_type(): + assert is_abstract_type(ObjectType) is False + with raises(TypeError): + assert_abstract_type(ObjectType) + + def returns_false_for_wrapped_non_abstract_type(): + assert is_abstract_type(GraphQLList(ObjectType)) is False + with raises(TypeError): + assert_abstract_type(GraphQLList(ObjectType)) + + def describe_is_wrapping_type(): + + def returns_true_for_list_and_non_null_types(): + assert is_wrapping_type(GraphQLList(ObjectType)) is True + assert_wrapping_type(GraphQLList(ObjectType)) + assert is_wrapping_type(GraphQLNonNull(ObjectType)) is True + assert_wrapping_type(GraphQLNonNull(ObjectType)) + + def returns_false_for_unwrapped_types(): + assert is_wrapping_type(ObjectType) is False + with raises(TypeError): + assert_wrapping_type(ObjectType) + + def describe_is_nullable_type(): + + def returns_true_for_unwrapped_types(): + assert is_nullable_type(ObjectType) is True + assert_nullable_type(ObjectType) + + def returns_true_for_list_of_non_null_types(): + assert is_nullable_type( + GraphQLList(GraphQLNonNull(ObjectType))) is True + assert_nullable_type(GraphQLList(GraphQLNonNull(ObjectType))) + + def returns_false_for_non_null_types(): + assert is_nullable_type(GraphQLNonNull(ObjectType)) is False + with raises(TypeError): + assert_nullable_type(GraphQLNonNull(ObjectType)) + + def describe_get_nullable_type(): + + def returns_none_for_no_type(): + assert get_nullable_type(None) is None + + def returns_self_for_a_nullable_type(): + assert get_nullable_type(ObjectType) is ObjectType + list_of_obj = GraphQLList(ObjectType) + assert get_nullable_type(list_of_obj) is list_of_obj + + def unwraps_non_null_type(): + assert get_nullable_type(GraphQLNonNull(ObjectType)) is ObjectType + + def describe_is_named_type(): + + def returns_true_for_unwrapped_types(): + assert is_named_type(ObjectType) is True + assert_named_type(ObjectType) + + def returns_false_for_list_and_non_null_types(): + assert is_named_type(GraphQLList(ObjectType)) is False + with raises(TypeError): + assert_named_type(GraphQLList(ObjectType)) + assert is_named_type(GraphQLNonNull(ObjectType)) is False + with raises(TypeError): + assert_named_type(GraphQLNonNull(ObjectType)) + + def describe_get_named_type(): + + def returns_none_for_no_type(): + assert get_named_type(None) is None + + def returns_self_for_an_unwrapped_type(): + assert get_named_type(ObjectType) is ObjectType + + def unwraps_wrapper_types(): + assert get_named_type(GraphQLNonNull(ObjectType)) is ObjectType + assert get_named_type(GraphQLList(ObjectType)) is ObjectType + + def unwraps_deeply_wrapper_types(): + assert get_named_type(GraphQLNonNull(GraphQLList(GraphQLNonNull( + ObjectType)))) is ObjectType diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py new file mode 100644 index 00000000..7b7f41c7 --- /dev/null +++ b/tests/type/test_schema.py @@ -0,0 +1,59 @@ +from pytest import raises + +from graphql.language import DirectiveLocation +from graphql.type import ( + GraphQLField, GraphQLInterfaceType, + GraphQLObjectType, GraphQLSchema, GraphQLString, GraphQLInputObjectType, + GraphQLInputField, GraphQLDirective, GraphQLArgument, GraphQLList) + +InterfaceType = GraphQLInterfaceType('Interface', { + 'fieldName': GraphQLField(GraphQLString)}) + +DirectiveInputType = GraphQLInputObjectType('DirInput', { + 'field': GraphQLInputField(GraphQLString)}) + +WrappedDirectiveInputType = GraphQLInputObjectType('WrappedDirInput', { + 'field': 
GraphQLInputField(GraphQLString)}) + +Directive = GraphQLDirective( + name='dir', + locations=[DirectiveLocation.OBJECT], + args={'arg': GraphQLArgument(DirectiveInputType), + 'argList': GraphQLArgument(GraphQLList(WrappedDirectiveInputType))}) + +Schema = GraphQLSchema(query=GraphQLObjectType('Query', { + 'getObject': GraphQLField(InterfaceType, resolve=lambda: {})}), + directives=[Directive]) + + +def describe_type_system_schema(): + + def describe_type_map(): + + def includes_input_types_only_used_in_directives(): + assert 'DirInput' in Schema.type_map + assert 'WrappedDirInput' in Schema.type_map + + def describe_validity(): + + def describe_when_not_assumed_valid(): + + def configures_the_schema_to_still_needing_validation(): + # noinspection PyProtectedMember + assert GraphQLSchema( + assume_valid=False)._validation_errors is None + + def checks_the_configuration_for_mistakes(): + with raises(Exception): + # noinspection PyTypeChecker + GraphQLSchema(lambda: None) + with raises(Exception): + GraphQLSchema(types={}) + with raises(Exception): + GraphQLSchema(directives={}) + + def describe_when_assumed_valid(): + def configures_the_schema_to_have_no_errors(): + # noinspection PyProtectedMember + assert GraphQLSchema( + assume_valid=True)._validation_errors == [] diff --git a/tests/type/test_serialization.py b/tests/type/test_serialization.py new file mode 100644 index 00000000..d6360eee --- /dev/null +++ b/tests/type/test_serialization.py @@ -0,0 +1,210 @@ +from math import inf, nan + +from pytest import raises + +from graphql.type import ( + GraphQLBoolean, GraphQLFloat, GraphQLID, GraphQLInt, GraphQLString) + + +def describe_type_system_scalar_coercion(): + + def serializes_output_as_int(): + assert GraphQLInt.serialize(1) == 1 + assert GraphQLInt.serialize('123') == 123 + assert GraphQLInt.serialize(0) == 0 + assert GraphQLInt.serialize(-1) == -1 + assert GraphQLInt.serialize(1e5) == 100000 + assert GraphQLInt.serialize(False) == 0 + assert GraphQLInt.serialize(True) == 1 + + # The GraphQL specification does not allow serializing non-integer + # values as Int to avoid accidental data loss. 
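+        # (Int is a 32-bit signed integer, so besides non-integer inputs
+        # the cases below also reject values outside the range
+        # -2147483648..2147483647.)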
+ with raises(TypeError) as exc_info: + GraphQLInt.serialize(0.1) + assert str(exc_info.value) == ( + 'Int cannot represent non-integer value: 0.1') + with raises(TypeError) as exc_info: + GraphQLInt.serialize(1.1) + assert str(exc_info.value) == ( + 'Int cannot represent non-integer value: 1.1') + with raises(TypeError) as exc_info: + GraphQLInt.serialize(-1.1) + assert str(exc_info.value) == ( + 'Int cannot represent non-integer value: -1.1') + with raises(TypeError) as exc_info: + GraphQLInt.serialize('-1.1') + assert str(exc_info.value) == ( + "Int cannot represent non-integer value: '-1.1'") + # Maybe a safe JavaScript int, but bigger than 2^32, so not + # representable as a GraphQL Int + with raises(Exception) as exc_info: + GraphQLInt.serialize(9876504321) + assert str(exc_info.value) == ( + 'Int cannot represent non 32-bit signed integer value:' + ' 9876504321') + with raises(Exception) as exc_info: + GraphQLInt.serialize(-9876504321) + assert str(exc_info.value) == ( + 'Int cannot represent non 32-bit signed integer value:' + ' -9876504321') + # Too big to represent as an Int in JavaScript or GraphQL + with raises(Exception) as exc_info: + GraphQLInt.serialize(1e100) + assert str(exc_info.value) == ( + 'Int cannot represent non 32-bit signed integer value: 1e+100') + with raises(Exception) as exc_info: + GraphQLInt.serialize(-1e100) + assert str(exc_info.value) == ( + 'Int cannot represent non 32-bit signed integer value: -1e+100') + with raises(Exception) as exc_info: + GraphQLInt.serialize('one') + assert str(exc_info.value) == ( + "Int cannot represent non-integer value: 'one'") + # Doesn't represent number + with raises(Exception) as exc_info: + GraphQLInt.serialize('') + assert str(exc_info.value) == ( + "Int cannot represent non-integer value: ''") + with raises(Exception) as exc_info: + GraphQLInt.serialize(nan) + assert str(exc_info.value) == ( + 'Int cannot represent non-integer value: nan') + with raises(Exception) as exc_info: + GraphQLInt.serialize(inf) + assert str(exc_info.value) == ( + 'Int cannot represent non-integer value: inf') + with raises(Exception) as exc_info: + GraphQLInt.serialize([5]) + assert str(exc_info.value) == ( + 'Int cannot represent non-integer value: [5]') + + def serializes_output_as_float(): + assert GraphQLFloat.serialize(1) == 1.0 + assert GraphQLFloat.serialize(0) == 0.0 + assert GraphQLFloat.serialize('123.5') == 123.5 + assert GraphQLFloat.serialize(-1) == -1.0 + assert GraphQLFloat.serialize(0.1) == 0.1 + assert GraphQLFloat.serialize(1.1) == 1.1 + assert GraphQLFloat.serialize(-1.1) == -1.1 + assert GraphQLFloat.serialize('-1.1') == -1.1 + assert GraphQLFloat.serialize(False) == 0 + assert GraphQLFloat.serialize(True) == 1 + + with raises(Exception) as exc_info: + GraphQLFloat.serialize(nan) + assert str(exc_info.value) == ( + 'Float cannot represent non numeric value: nan') + with raises(Exception) as exc_info: + GraphQLFloat.serialize(inf) + assert str(exc_info.value) == ( + 'Float cannot represent non numeric value: inf') + with raises(Exception) as exc_info: + GraphQLFloat.serialize('one') + assert str(exc_info.value) == ( + "Float cannot represent non numeric value: 'one'") + with raises(Exception) as exc_info: + GraphQLFloat.serialize('') + assert str(exc_info.value) == ( + "Float cannot represent non numeric value: ''") + with raises(Exception) as exc_info: + GraphQLFloat.serialize([5]) + assert str(exc_info.value) == ( + 'Float cannot represent non numeric value: [5]') + + def serializes_output_as_string(): + assert 
GraphQLString.serialize('string') == 'string' + assert GraphQLString.serialize(1) == '1' + assert GraphQLString.serialize(-1.1) == '-1.1' + assert GraphQLString.serialize(True) == 'true' + assert GraphQLString.serialize(False) == 'false' + + class StringableObjValue: + def __str__(self): + return 'something useful' + + assert GraphQLString.serialize( + StringableObjValue()) == 'something useful' + + with raises(Exception) as exc_info: + GraphQLString.serialize(nan) + assert str(exc_info.value) == ( + 'String cannot represent value: nan') + + with raises(Exception) as exc_info: + GraphQLString.serialize([1]) + assert str(exc_info.value) == ( + 'String cannot represent value: [1]') + + with raises(Exception) as exc_info: + GraphQLString.serialize({}) + assert str(exc_info.value) == ( + 'String cannot represent value: {}') + + def serializes_output_as_boolean(): + assert GraphQLBoolean.serialize(1) is True + assert GraphQLBoolean.serialize(0) is False + assert GraphQLBoolean.serialize(True) is True + assert GraphQLBoolean.serialize(False) is False + + with raises(Exception) as exc_info: + GraphQLBoolean.serialize(nan) + assert str(exc_info.value) == ( + 'Boolean cannot represent a non boolean value: nan') + + with raises(Exception) as exc_info: + GraphQLBoolean.serialize('') + assert str(exc_info.value) == ( + "Boolean cannot represent a non boolean value: ''") + + with raises(Exception) as exc_info: + GraphQLBoolean.serialize('True') + assert str(exc_info.value) == ( + "Boolean cannot represent a non boolean value: 'True'") + + with raises(Exception) as exc_info: + GraphQLBoolean.serialize([False]) + assert str(exc_info.value) == ( + 'Boolean cannot represent a non boolean value: [False]') + + with raises(Exception) as exc_info: + GraphQLBoolean.serialize({}) + assert str(exc_info.value) == ( + 'Boolean cannot represent a non boolean value: {}') + + def serializes_output_as_id(): + assert GraphQLID.serialize('string') == 'string' + assert GraphQLID.serialize('false') == 'false' + assert GraphQLID.serialize('') == '' + assert GraphQLID.serialize(123) == '123' + assert GraphQLID.serialize(0) == '0' + assert GraphQLID.serialize(-1) == '-1' + + class ObjValue: + def __init__(self, value): + self._id = value + + def __str__(self): + return str(self._id) + + obj_value = ObjValue(123) + assert GraphQLID.serialize(obj_value) == '123' + + with raises(Exception) as exc_info: + GraphQLID.serialize(True) + assert str(exc_info.value) == ( + "ID cannot represent value: True") + + with raises(Exception) as exc_info: + GraphQLID.serialize(3.14) + assert str(exc_info.value) == ( + "ID cannot represent value: 3.14") + + with raises(Exception) as exc_info: + GraphQLID.serialize({}) + assert str(exc_info.value) == ( + "ID cannot represent value: {}") + + with raises(Exception) as exc_info: + GraphQLID.serialize(['abc']) + assert str(exc_info.value) == ( + "ID cannot represent value: ['abc']") diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py new file mode 100644 index 00000000..e7091ed7 --- /dev/null +++ b/tests/type/test_validation.py @@ -0,0 +1,1377 @@ +from typing import cast + +from pytest import fixture, mark, raises + +from graphql.language import parse +from graphql.type import ( + GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLInputField, GraphQLInputObjectType, GraphQLInterfaceType, + GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, + GraphQLSchema, GraphQLString, GraphQLUnionType, validate_schema, + GraphQLArgument, GraphQLDirective) +from 
graphql.utilities import build_schema, extend_schema + + +def _get(value): + """Return a fixed value""" + return lambda *args: value + + +SomeScalarType = GraphQLScalarType( + name='SomeScalar', + serialize=_get(None), + parse_value=_get(None), + parse_literal=_get(None)) + +SomeInterfaceType = GraphQLInterfaceType( + name='SomeInterface', + fields=lambda: {'f': GraphQLField(SomeObjectType)}) + +SomeObjectType = GraphQLObjectType( + name='SomeObject', + fields=lambda: {'f': GraphQLField(SomeObjectType)}, + interfaces=[SomeInterfaceType]) + +SomeUnionType = GraphQLUnionType( + name='SomeUnion', + types=[SomeObjectType]) + +SomeEnumType = GraphQLEnumType( + name='SomeEnum', + values={'ONLY': GraphQLEnumValue()}) + +SomeInputObjectType = GraphQLInputObjectType( + name='SomeInputObject', + fields={'val': GraphQLInputField(GraphQLString, default_value='hello')}) + + +def with_modifiers(types): + return types + [ + GraphQLList(t) for t in types] + [ + GraphQLNonNull(t) for t in types] + [ + GraphQLNonNull(GraphQLList(t)) for t in types] + + +output_types = with_modifiers([ + GraphQLString, + SomeScalarType, + SomeEnumType, + SomeObjectType, + SomeUnionType, + SomeInterfaceType]) + +not_output_types = with_modifiers([SomeInputObjectType]) + +input_types = with_modifiers([ + GraphQLString, + SomeScalarType, + SomeEnumType, + SomeInputObjectType]) + +not_input_types = with_modifiers([ + SomeObjectType, + SomeUnionType, + SomeInterfaceType]) + + +def schema_with_field_type(type_): + return GraphQLSchema( + query=GraphQLObjectType( + name='Query', + fields={'f': GraphQLField(type_)}), + types=[type_]) + + +def describe_type_system_a_schema_must_have_object_root_types(): + + def accepts_a_schema_whose_query_type_is_an_object_type(): + schema = build_schema(""" + type Query { + test: String + } + """) + assert validate_schema(schema) == [] + + schema_with_def = build_schema(""" + schema { + query: QueryRoot + } + + type QueryRoot { + test: String + } + """) + + assert validate_schema(schema_with_def) == [] + + def accepts_a_schema_whose_query_and_mutation_types_are_object_types(): + schema = build_schema(""" + type Query { + test: String + } + + type Mutation { + test: String + } + """) + assert validate_schema(schema) == [] + + schema_with_def = build_schema(""" + schema { + query: QueryRoot + mutation: MutationRoot + } + + type QueryRoot { + test: String + } + + type MutationRoot { + test: String + } + """) + assert validate_schema(schema_with_def) == [] + + def accepts_a_schema_whose_query_and_subscription_types_are_object_types(): + schema = build_schema(""" + type Query { + test: String + } + + type Subscription { + test: String + } + """) + assert validate_schema(schema) == [] + + schema_with_def = build_schema(""" + schema { + query: QueryRoot + subscription: SubscriptionRoot + } + + type QueryRoot { + test: String + } + + type SubscriptionRoot { + test: String + } + """) + assert validate_schema(schema_with_def) == [] + + def rejects_a_schema_without_a_query_type(): + schema = build_schema(""" + type Mutation { + test: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Query root type must be provided.', + 'locations': None}] + + schema_with_def = build_schema(""" + schema { + mutation: MutationRoot + } + + type MutationRoot { + test: String + } + """) + assert validate_schema(schema_with_def) == [{ + 'message': 'Query root type must be provided.', + 'locations': [(2, 13)]}] + + def rejects_a_schema_whose_query_root_type_is_not_an_object_type(): + schema = build_schema(""" + 
input Query { + test: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Query root type must be Object type,' + ' it cannot be Query.', + 'locations': [(2, 13)]}] + + schema_with_def = build_schema(""" + schema { + query: SomeInputObject + } + + input SomeInputObject { + test: String + } + """) + assert validate_schema(schema_with_def) == [{ + 'message': 'Query root type must be Object type,' + ' it cannot be SomeInputObject.', + 'locations': [(3, 22)]}] + + def rejects_a_schema_whose_mutation_type_is_an_input_type(): + schema = build_schema(""" + type Query { + field: String + } + + input Mutation { + test: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Mutation root type must be Object type if provided,' + ' it cannot be Mutation.', + 'locations': [(6, 13)]}] + + schema_with_def = build_schema(""" + schema { + query: Query + mutation: SomeInputObject + } + + type Query { + field: String + } + + input SomeInputObject { + test: String + } + """) + assert validate_schema(schema_with_def) == [{ + 'message': 'Mutation root type must be Object type if provided,' + ' it cannot be SomeInputObject.', + 'locations': [(4, 25)]}] + + def rejects_a_schema_whose_subscription_type_is_an_input_type(): + schema = build_schema(""" + type Query { + field: String + } + + input Subscription { + test: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Subscription root type must be Object type if' + ' provided, it cannot be Subscription.', + 'locations': [(6, 13)]}] + + schema_with_def = build_schema(""" + schema { + query: Query + subscription: SomeInputObject + } + + type Query { + field: String + } + + input SomeInputObject { + test: String + } + """) + assert validate_schema(schema_with_def) == [{ + 'message': 'Subscription root type must be Object type if' + ' provided, it cannot be SomeInputObject.', + 'locations': [(4, 29)]}] + + def rejects_a_schema_extended_with_invalid_root_types(): + schema = build_schema(""" + input SomeInputObject { + test: String + } + """) + schema = extend_schema(schema, parse(""" + extend schema { + query: SomeInputObject + } + """)) + schema = extend_schema(schema, parse(""" + extend schema { + mutation: SomeInputObject + } + """)) + schema = extend_schema(schema, parse(""" + extend schema { + subscription: SomeInputObject + } + """)) + assert validate_schema(schema) == [{ + 'message': 'Query root type must be Object type,' + ' it cannot be SomeInputObject.', + 'locations': [(3, 22)] + }, { + 'message': 'Mutation root type must be Object type' + ' if provided, it cannot be SomeInputObject.', + 'locations': [(3, 25)] + }, { + 'message': 'Subscription root type must be Object type' + ' if provided, it cannot be SomeInputObject.', + 'locations': [(3, 29)] + }] + + def rejects_a_schema_whose_directives_are_incorrectly_typed(): + schema = GraphQLSchema(SomeObjectType, directives=[ + cast(GraphQLDirective, 'somedirective')]) + msg = validate_schema(schema)[0].message + assert msg == "Expected directive but got: 'somedirective'." 
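The root-type checks above all follow one pattern: build a schema from SDL with build_schema, call validate_schema, and compare the returned errors (each exposing a message and optional locations) with the expected list. A minimal sketch of that pattern on its own, using only names already imported in this test module:

    from graphql.type import validate_schema
    from graphql.utilities import build_schema

    # A schema that lacks a query root type, as in the tests above.
    schema = build_schema("""
        type Mutation {
            test: String
        }
        """)

    # validate_schema returns a list of errors; an empty list means valid.
    errors = validate_schema(schema)
    assert errors
    assert errors[0].message == 'Query root type must be provided.'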
+ + +def describe_type_system_objects_must_have_fields(): + + def accepts_an_object_type_with_fields_object(): + schema = build_schema(""" + type Query { + field: SomeObject + } + + type SomeObject { + field: String + } + """) + assert validate_schema(schema) == [] + + def rejects_an_object_type_with_missing_fields(): + schema = build_schema(""" + type Query { + test: IncompleteObject + } + + type IncompleteObject + """) + assert validate_schema(schema) == [{ + 'message': 'Type IncompleteObject must define one or more fields.', + 'locations': [(6, 13)]}] + + manual_schema = schema_with_field_type( + GraphQLObjectType('IncompleteObject', {})) + msg = validate_schema(manual_schema)[0].message + assert msg == "Type IncompleteObject must define one or more fields." + + manual_schema_2 = schema_with_field_type( + GraphQLObjectType('IncompleteObject', lambda: {})) + msg = validate_schema(manual_schema_2)[0].message + assert msg == "Type IncompleteObject must define one or more fields." + + def rejects_an_object_type_with_incorrectly_named_fields(): + schema = schema_with_field_type(GraphQLObjectType('SomeObject', { + 'bad-name-with-dashes': GraphQLField(GraphQLString)})) + msg = validate_schema(schema)[0].message + assert msg == ( + 'Names must match /^[_a-zA-Z][_a-zA-Z0-9]*$/' + " but 'bad-name-with-dashes' does not.") + + +def describe_type_system_field_args_must_be_properly_named(): + + def accepts_field_args_with_valid_names(): + schema = schema_with_field_type(GraphQLObjectType('SomeObject', { + 'goodField': GraphQLField(GraphQLString, args={ + 'goodArg': GraphQLArgument(GraphQLString)})})) + assert validate_schema(schema) == [] + + def reject_field_args_with_invalid_names(): + QueryType = GraphQLObjectType('SomeObject', { + 'badField': GraphQLField(GraphQLString, args={ + 'bad-name-with-dashes': GraphQLArgument(GraphQLString)})}) + schema = GraphQLSchema(QueryType) + msg = validate_schema(schema)[0].message + assert msg == ( + 'Names must match /^[_a-zA-Z][_a-zA-Z0-9]*$/' + " but 'bad-name-with-dashes' does not.") + + +def describe_type_system_union_types_must_be_valid(): + + def accepts_a_union_type_with_member_types(): + schema = build_schema(""" + type Query { + test: GoodUnion + } + + type TypeA { + field: String + } + + type TypeB { + field: String + } + + union GoodUnion = + | TypeA + | TypeB + """) + assert validate_schema(schema) == [] + + def rejects_a_union_type_with_empty_types(): + schema = build_schema(""" + type Query { + test: BadUnion + } + + union BadUnion + """) + schema = extend_schema(schema, parse(""" + directive @test on UNION + + extend union BadUnion @test + """)) + assert validate_schema(schema) == [{ + 'message': 'Union type BadUnion must define one or more' + ' member types.', + 'locations': [(6, 13), (4, 13)]}] + + def rejects_a_union_type_with_duplicated_member_type(): + schema = build_schema(""" + type Query { + test: BadUnion + } + + type TypeA { + field: String + } + + type TypeB { + field: String + } + + union BadUnion = + | TypeA + | TypeB + | TypeA + """) + + assert validate_schema(schema) == [{ + 'message': 'Union type BadUnion can only include type TypeA once.', + 'locations': [(15, 17), (17, 17)]}] + + schema = extend_schema(schema, parse('extend union BadUnion = TypeB')) + + assert validate_schema(schema) == [{ + 'message': 'Union type BadUnion can only include type TypeA once.', + 'locations': [(15, 17), (17, 17)]}, { + 'message': 'Union type BadUnion can only include type TypeB once.', + 'locations': [(16, 17), (1, 25)]}] + + def 
rejects_a_union_type_with_non_object_member_types(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + build_schema(""" + type Query { + test: BadUnion + } + + type TypeA { + field: String + } + + type TypeB { + field: String + } + + union BadUnion = + | TypeA + | String + | TypeB + """) + + msg = str(exc_info.value) + assert msg == 'BadUnion types must be GraphQLObjectType objects.' + + bad_union_member_types = [ + GraphQLString, + GraphQLNonNull(SomeObjectType), + GraphQLList(SomeObjectType), + SomeInterfaceType, + SomeUnionType, + SomeEnumType, + SomeInputObjectType] + for member_type in bad_union_member_types: + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_field_type(GraphQLUnionType( + 'BadUnion', types=[member_type])) + msg = str(exc_info.value) + assert msg == 'BadUnion types must be GraphQLObjectType objects.' + + +def describe_type_system_input_objects_must_have_fields(): + + def accepts_an_input_object_type_with_fields(): + schema = build_schema(""" + type Query { + field(arg: SomeInputObject): String + } + + input SomeInputObject { + field: String + } + """) + assert validate_schema(schema) == [] + + def rejects_an_input_object_type_with_missing_fields(): + schema = build_schema(""" + type Query { + field(arg: SomeInputObject): String + } + + input SomeInputObject + """) + assert validate_schema(schema) == [{ + 'message': 'Input Object type SomeInputObject' + ' must define one or more fields.', + 'locations': [(6, 13)]}] + + def rejects_an_input_object_type_with_incorrectly_typed_fields(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + build_schema(""" + type Query { + field(arg: SomeInputObject): String + } + + type SomeObject { + field: String + } + + union SomeUnion = SomeObject + + input SomeInputObject { + badObject: SomeObject + badUnion: SomeUnion + goodInputObject: SomeInputObject + } + """) + msg = str(exc_info.value) + assert msg == ( + 'SomeInputObject fields cannot be resolved:' + ' Input field type must be a GraphQL input type.') + + +def describe_type_system_enum_types_must_be_well_defined(): + + def rejects_an_enum_type_without_values(): + schema = build_schema(""" + type Query { + field: SomeEnum + } + + enum SomeEnum + """) + + schema = extend_schema(schema, parse(""" + directive @test on ENUM + + extend enum SomeEnum @test + """)) + + assert validate_schema(schema) == [{ + 'message': 'Enum type SomeEnum must define one or more values.', + 'locations': [(6, 13), (4, 13)]}] + + def rejects_an_enum_type_with_duplicate_values(): + schema = build_schema(""" + type Query { + field: SomeEnum + } + + enum SomeEnum { + SOME_VALUE + SOME_VALUE + } + """) + assert validate_schema(schema) == [{ + 'message': 'Enum type SomeEnum can include value SOME_VALUE' + ' only once.', + 'locations': [(7, 15), (8, 15)]}] + + def rejects_an_enum_type_with_incorrectly_named_values(): + def schema_with_enum(name): + return schema_with_field_type(GraphQLEnumType( + 'SomeEnum', {name: GraphQLEnumValue(1)})) + + schema1 = schema_with_enum('#value') + msg = validate_schema(schema1)[0].message + assert msg == ( + 'Names must match /^[_a-zA-Z][_a-zA-Z0-9]*$/' + " but '#value' does not.") + + schema2 = schema_with_enum('1value') + msg = validate_schema(schema2)[0].message + assert msg == ( + 'Names must match /^[_a-zA-Z][_a-zA-Z0-9]*$/' + " but '1value' does not.") + + schema3 = schema_with_enum('KEBAB-CASE') + msg = validate_schema(schema3)[0].message + assert msg == 
( + 'Names must match /^[_a-zA-Z][_a-zA-Z0-9]*$/' + " but 'KEBAB-CASE' does not.") + + schema4 = schema_with_enum('true') + msg = validate_schema(schema4)[0].message + assert msg == ( + 'Enum type SomeEnum cannot include value: true.') + + schema5 = schema_with_enum('false') + msg = validate_schema(schema5)[0].message + assert msg == ( + 'Enum type SomeEnum cannot include value: false.') + + schema6 = schema_with_enum('null') + msg = validate_schema(schema6)[0].message + assert msg == ( + 'Enum type SomeEnum cannot include value: null.') + + +def describe_type_system_object_fields_must_have_output_types(): + + @fixture + def schema_with_object_field_of_type(field_type): + BadObjectType = GraphQLObjectType('BadObject', { + 'badField': GraphQLField(field_type)}) + return GraphQLSchema(GraphQLObjectType('Query', { + 'f': GraphQLField(BadObjectType)}), types=[SomeObjectType]) + + @mark.parametrize('type_', output_types) + def accepts_an_output_type_as_an_object_field_type(type_): + schema = schema_with_object_field_of_type(type_) + assert validate_schema(schema) == [] + + def rejects_an_empty_object_field_type(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_object_field_of_type(None) + msg = str(exc_info.value) + assert msg == 'Field type must be an output type.' + + @mark.parametrize('type_', not_output_types) + def rejects_a_non_output_type_as_an_object_field_type(type_): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_object_field_of_type(type_) + msg = str(exc_info.value) + assert msg == 'Field type must be an output type.' + + @mark.parametrize('type_', [int, float, str]) + def rejects_a_non_type_value_as_an_object_field_type(type_): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_object_field_of_type(type_) + msg = str(exc_info.value) + assert msg == 'Field type must be an output type.' 
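The name rule enforced in the enum cases above (and throughout this file) is the introspection-safe identifier pattern quoted in the error messages; true, false and null additionally cannot be used as enum value names because they would collide with the corresponding literals. A small, self-contained illustration of the pattern itself:

    import re

    # The pattern quoted verbatim in the error messages above.
    NAME_PATTERN = re.compile('^[_a-zA-Z][_a-zA-Z0-9]*$')

    assert NAME_PATTERN.match('goodField')
    assert NAME_PATTERN.match('_alsoGood1')
    assert not NAME_PATTERN.match('bad-name-with-dashes')
    assert not NAME_PATTERN.match('1value')
    assert not NAME_PATTERN.match('KEBAB-CASE')

    # 'true', 'false' and 'null' match the pattern but are still rejected as
    # enum value names, as the SomeEnum cases above show.
    assert NAME_PATTERN.match('true')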
+ + def rejects_with_relevant_locations_for_a_non_output_type(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + build_schema(""" + type Query { + field: [SomeInputObject] + } + + input SomeInputObject { + field: String + } + """) + msg = str(exc_info.value) + assert msg == ( + 'Query fields cannot be resolved:' + ' Field type must be an output type.') + + +def describe_type_system_objects_can_only_implement_unique_interfaces(): + + def rejects_an_object_implementing_a_non_type_values(): + schema = GraphQLSchema( + query=GraphQLObjectType('BadObject', { + 'f': GraphQLField(GraphQLString)}, interfaces=[])) + schema.query_type.interfaces.append(None) + + assert validate_schema(schema) == [{ + 'message': 'Type BadObject must only implement Interface types,' + ' it cannot implement None.'}] + + def rejects_an_object_implementing_a_non_interface_type(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + build_schema(""" + type Query { + test: BadObject + } + + input SomeInputObject { + field: String + } + + type BadObject implements SomeInputObject { + field: String + } + """) + msg = str(exc_info.value) + assert msg == ( + 'BadObject interfaces must be GraphQLInterface objects.') + + def rejects_an_object_implementing_the_same_interface_twice(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: String + } + + type AnotherObject implements AnotherInterface & AnotherInterface { + field: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Type AnotherObject can only implement' + ' AnotherInterface once.', + 'locations': [(10, 43), (10, 62)]}] + + def rejects_an_object_implementing_same_interface_twice_due_to_extension(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: String + } + + type AnotherObject implements AnotherInterface { + field: String + } + """) + extended_schema = extend_schema(schema, parse( + 'extend type AnotherObject implements AnotherInterface')) + assert validate_schema(extended_schema) == [{ + 'message': 'Type AnotherObject can only implement' + ' AnotherInterface once.', + 'locations': [(10, 43), (1, 38)]}] + + +def describe_type_system_interface_extensions_should_be_valid(): + + def rejects_object_implementing_extended_interface_due_to_missing_field(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: String + } + + type AnotherObject implements AnotherInterface { + field: String + } + """) + extended_schema = extend_schema(schema, parse(""" + extend interface AnotherInterface { + newField: String + } + + extend type AnotherObject { + differentNewField: String + } + """)) + assert validate_schema(extended_schema) == [{ + 'message': 'Interface field AnotherInterface.newField expected' + ' but AnotherObject does not provide it.', + 'locations': [(3, 15), (10, 13), (6, 13)]}] + + def rejects_object_implementing_extended_interface_due_to_missing_args(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: String + } + + type AnotherObject implements AnotherInterface { + field: String + } + """) + extended_schema = extend_schema(schema, parse(""" + extend interface AnotherInterface { + newField(test: Boolean): String + } + + extend type AnotherObject { + newField: String + } + """)) + assert validate_schema(extended_schema) == [{ + 'message': 
'Interface field argument' + ' AnotherInterface.newField(test:) expected' + ' but AnotherObject.newField does not provide it.', + 'locations': [(3, 24), (7, 15)]}] + + def rejects_object_implementing_extended_interface_due_to_type_mismatch(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: String + } + + type AnotherObject implements AnotherInterface { + field: String + } + """) + extended_schema = extend_schema(schema, parse(""" + extend interface AnotherInterface { + newInterfaceField: NewInterface + } + + interface NewInterface { + newField: String + } + + interface MismatchingInterface { + newField: String + } + + extend type AnotherObject { + newInterfaceField: MismatchingInterface + } + + # Required to prevent unused interface errors + type DummyObject implements NewInterface & MismatchingInterface { + newField: String + } + """)) + assert validate_schema(extended_schema) == [{ + 'message': 'Interface field AnotherInterface.newInterfaceField' + ' expects type NewInterface' + ' but AnotherObject.newInterfaceField' + ' is type MismatchingInterface.', + 'locations': [(3, 34), (15, 34)]}] + + +def describe_type_system_interface_fields_must_have_output_types(): + + @fixture + def schema_with_interface_field_of_type(field_type): + BadInterfaceType = GraphQLInterfaceType('BadInterface', { + 'badField': GraphQLField(field_type)}) + BadImplementingType = GraphQLObjectType('BadImplementing', { + 'badField': GraphQLField(field_type)}, + interfaces=[BadInterfaceType]) + return GraphQLSchema(GraphQLObjectType('Query', { + 'f': GraphQLField(BadInterfaceType)}), + types=[BadImplementingType, SomeObjectType]) + + @mark.parametrize('type_', output_types) + def accepts_an_output_type_as_an_interface_field_type(type_): + schema = schema_with_interface_field_of_type(type_) + assert validate_schema(schema) == [] + + def rejects_an_empty_interface_field_type(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_interface_field_of_type(None) + msg = str(exc_info.value) + assert msg == 'Field type must be an output type.' + + @mark.parametrize('type_', not_output_types) + def rejects_a_non_output_type_as_an_interface_field_type(type_): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_interface_field_of_type(type_) + msg = str(exc_info.value) + assert msg == 'Field type must be an output type.' + + @mark.parametrize('type_', [int, float, str]) + def rejects_a_non_type_value_as_an_interface_field_type(type_): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_interface_field_of_type(type_) + msg = str(exc_info.value) + assert msg == 'Field type must be an output type.' 
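The interface-extension cases earlier in this hunk all follow the same recipe: build a schema from SDL, merge a parsed extension document into it with extend_schema, and validate the result; the reported locations then point into both the original and the extension documents. A compact sketch of that flow, reusing the schema from the duplicate-implementation case above:

    from graphql.language import parse
    from graphql.type import validate_schema
    from graphql.utilities import build_schema, extend_schema

    schema = build_schema("""
        type Query {
            test: AnotherObject
        }

        interface AnotherInterface {
            field: String
        }

        type AnotherObject implements AnotherInterface {
            field: String
        }
        """)

    # Extensions are parsed separately and merged into a new schema object.
    extended = extend_schema(schema, parse(
        'extend type AnotherObject implements AnotherInterface'))

    # Validation reports the duplicate implementation, pointing at both the
    # original definition and the extension document.
    errors = validate_schema(extended)
    assert errors and 'can only implement AnotherInterface once' in errors[0].message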
+ + def rejects_a_non_output_type_as_an_interface_field_with_locations(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + build_schema(""" + type Query { + test: SomeInterface + } + + interface SomeInterface { + field: SomeInputObject + } + + input SomeInputObject { + foo: String + } + + type SomeObject implements SomeInterface { + field: SomeInputObject + } + """) + msg = str(exc_info.value) + assert msg == ( + 'SomeInterface fields cannot be resolved:' + ' Field type must be an output type.') + + def accepts_an_interface_not_implemented_by_at_least_one_object(): + schema = build_schema(""" + type Query { + test: SomeInterface + } + + interface SomeInterface { + foo: String + } + """) + assert validate_schema(schema) == [] + + +def describe_type_system_field_arguments_must_have_input_types(): + + @fixture + def schema_with_arg_of_type(arg_type): + BadObjectType = GraphQLObjectType('BadObject', { + 'badField': GraphQLField(GraphQLString, args={ + 'badArg': GraphQLArgument(arg_type)})}) + return GraphQLSchema(GraphQLObjectType('Query', { + 'f': GraphQLField(BadObjectType)})) + + @mark.parametrize('type_', input_types) + def accepts_an_input_type_as_a_field_arg_type(type_): + schema = schema_with_arg_of_type(type_) + assert validate_schema(schema) == [] + + def rejects_an_empty_field_arg_type(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_arg_of_type(None) + msg = str(exc_info.value) + assert msg == 'Argument type must be a GraphQL input type.' + + @mark.parametrize('type_', not_input_types) + def rejects_a_non_input_type_as_a_field_arg_type(type_): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_arg_of_type(type_) + msg = str(exc_info.value) + assert msg == 'Argument type must be a GraphQL input type.' + + @mark.parametrize('type_', [int, float, str]) + def rejects_a_non_type_value_as_a_field_arg_type(type_): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_arg_of_type(type_) + msg = str(exc_info.value) + assert msg == 'Argument type must be a GraphQL input type.' + + def rejects_a_non_input_type_as_a_field_arg_with_locations(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + build_schema(""" + type Query { + test(arg: SomeObject): String + } + + type SomeObject { + foo: String + } + """) + msg = str(exc_info.value) + assert msg == ( + 'Query fields cannot be resolved:' + ' Argument type must be a GraphQL input type.') + + +def describe_type_system_input_object_fields_must_have_input_types(): + + @fixture + def schema_with_input_field_of_type(input_field_type): + BadInputObjectType = GraphQLInputObjectType('BadInputObject', { + 'badField': GraphQLInputField(input_field_type)}) + return GraphQLSchema(GraphQLObjectType('Query', { + 'f': GraphQLField(GraphQLString, args={ + 'badArg': GraphQLArgument(BadInputObjectType)})})) + + @mark.parametrize('type_', input_types) + def accepts_an_input_type_as_an_input_fieldtype(type_): + schema = schema_with_input_field_of_type(type_) + assert validate_schema(schema) == [] + + def rejects_an_empty_input_field_type(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_input_field_of_type(None) + msg = str(exc_info.value) + assert msg == 'Input field type must be a GraphQL input type.' 
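The argument and input-field fixtures above construct the offending GraphQLArgument / GraphQLInputField eagerly, so the TypeError in these cases comes from the constructor itself rather than from schema validation; only scalars, enums, input objects and wrappers of them are accepted where an input type is required. A minimal sketch of that behaviour, using only constructors already imported by these tests and the exact error strings asserted above:

    from pytest import raises

    from graphql.type import (
        GraphQLArgument, GraphQLField, GraphQLInputField, GraphQLObjectType,
        GraphQLString)

    # An object type is a valid output type but not a valid input type...
    SomeObject = GraphQLObjectType('SomeObject', {
        'field': GraphQLField(GraphQLString)})

    # ...so using it as an argument type is rejected at construction time.
    with raises(TypeError) as exc_info:
        GraphQLArgument(SomeObject)
    assert str(exc_info.value) == 'Argument type must be a GraphQL input type.'

    # The same check applies to input object fields.
    with raises(TypeError) as exc_info:
        GraphQLInputField(SomeObject)
    assert str(exc_info.value) == 'Input field type must be a GraphQL input type.'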
+ + @mark.parametrize('type_', not_input_types) + def rejects_a_non_input_type_as_an_input_field_type(type_): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_input_field_of_type(type_) + msg = str(exc_info.value) + assert msg == 'Input field type must be a GraphQL input type.' + + @mark.parametrize('type_', [int, float, str]) + def rejects_a_non_type_value_as_an_input_field_type(type_): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + schema_with_input_field_of_type(type_) + msg = str(exc_info.value) + assert msg == 'Input field type must be a GraphQL input type.' + + def rejects_with_relevant_locations_for_a_non_input_type(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + build_schema(""" + type Query { + test(arg: SomeInputObject): String + } + + input SomeInputObject { + foo: SomeObject + } + + type SomeObject { + bar: String + } + """) + msg = str(exc_info.value) + assert msg == ( + 'SomeInputObject fields cannot be resolved:' + ' Input field type must be a GraphQL input type.') + + +def describe_objects_must_adhere_to_interfaces_they_implement(): + + def accepts_an_object_which_implements_an_interface(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + field(input: String): String + } + """) + assert validate_schema(schema) == [] + + def accepts_an_object_which_implements_an_interface_and_with_more_fields(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + field(input: String): String + anotherField: String + } + """) + assert validate_schema(schema) == [] + + def accepts_an_object_which_implements_an_interface_field_with_more_args(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + field(input: String, anotherInput: String): String + } + """) + assert validate_schema(schema) == [] + + def rejects_an_object_missing_an_interface_field(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + anotherField: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Interface field AnotherInterface.field expected but' + ' AnotherObject does not provide it.', + 'locations': [(7, 15), (10, 13)]}] + + def rejects_an_object_with_an_incorrectly_typed_interface_field(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + field(input: String): Int + } + """) + assert validate_schema(schema) == [{ + 'message': 'Interface field AnotherInterface.field' + ' expects type String but' + ' AnotherObject.field is type Int.', + 'locations': [(7, 37), (11, 37)]}] + + def rejects_an_object_with_a_differently_typed_interface_field(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + type A { foo: String } + type B { foo: String } + + interface AnotherInterface { + field: A + } + + type AnotherObject implements AnotherInterface { + field: B + } + """) + 
assert validate_schema(schema) == [{ + 'message': 'Interface field AnotherInterface.field' + ' expects type A but AnotherObject.field is type B.', + 'locations': [(10, 22), (14, 22)]}] + + def accepts_an_object_with_a_subtyped_interface_field_interface(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: AnotherInterface + } + + type AnotherObject implements AnotherInterface { + field: AnotherObject + } + """) + assert validate_schema(schema) == [] + + def accepts_an_object_with_a_subtyped_interface_field_union(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + type SomeObject { + field: String + } + + union SomeUnionType = SomeObject + + interface AnotherInterface { + field: SomeUnionType + } + + type AnotherObject implements AnotherInterface { + field: SomeObject + } + """) + assert validate_schema(schema) == [] + + def rejects_an_object_missing_an_interface_argument(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + field: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Interface field argument' + ' AnotherInterface.field(input:) expected' + ' but AnotherObject.field does not provide it.', + 'locations': [(7, 21), (11, 15)]}] + + def rejects_an_object_with_an_incorrectly_typed_interface_argument(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + field(input: Int): String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Interface field argument' + ' AnotherInterface.field(input:) expects type String' + ' but AnotherObject.field(input:) is type Int.', + 'locations': [(7, 28), (11, 28)]}] + + def rejects_an_object_with_an_incorrectly_typed_field_and__argument(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + field(input: Int): Int + } + """) + assert validate_schema(schema) == [{ + 'message': 'Interface field AnotherInterface.field expects' + ' type String but AnotherObject.field is type Int.', + 'locations': [(7, 37), (11, 34)] + }, { + 'message': 'Interface field argument' + ' AnotherInterface.field(input:) expects type String' + ' but AnotherObject.field(input:) is type Int.', + 'locations': [(7, 28), (11, 28)] + }] + + def rejects_object_implementing_an_interface_field_with_additional_args(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field(input: String): String + } + + type AnotherObject implements AnotherInterface { + field(input: String, anotherInput: String!): String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Object field argument' + ' AnotherObject.field(anotherInput:) is of' + ' required type String! but is not also provided' + ' by the Interface field AnotherInterface.field.', + 'locations': [(11, 50), (7, 15)]}] + + def accepts_an_object_with_an_equivalently_wrapped_interface_field_type(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: [String]! + } + + type AnotherObject implements AnotherInterface { + field: [String]! 
+ } + """) + assert validate_schema(schema) == [] + + def rejects_an_object_with_a_non_list_interface_field_list_type(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: [String] + } + + type AnotherObject implements AnotherInterface { + field: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Interface field AnotherInterface.field expects type' + ' [String] but AnotherObject.field is type String.', + 'locations': [(7, 22), (11, 22)]}] + + def rejects_a_object_with_a_list_interface_field_non_list_type(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: String + } + + type AnotherObject implements AnotherInterface { + field: [String] + } + """) + assert validate_schema(schema) == [{ + 'message': 'Interface field AnotherInterface.field expects type' + ' String but AnotherObject.field is type [String].', + 'locations': [(7, 22), (11, 22)]}] + + def accepts_an_object_with_a_subset_non_null_interface_field_type(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: String + } + + type AnotherObject implements AnotherInterface { + field: String! + } + """) + assert validate_schema(schema) == [] + + def rejects_a_object_with_a_superset_nullable_interface_field_type(): + schema = build_schema(""" + type Query { + test: AnotherObject + } + + interface AnotherInterface { + field: String! + } + + type AnotherObject implements AnotherInterface { + field: String + } + """) + assert validate_schema(schema) == [{ + 'message': 'Interface field AnotherInterface.field expects type' + ' String! but AnotherObject.field is type String.', + 'locations': [(7, 22), (11, 22)]}] diff --git a/tests/utilities/__init__.py b/tests/utilities/__init__.py new file mode 100644 index 00000000..911ef26c --- /dev/null +++ b/tests/utilities/__init__.py @@ -0,0 +1 @@ +"""Tests for graphql.utilities""" diff --git a/tests/utilities/test_assert_valid_name.py b/tests/utilities/test_assert_valid_name.py new file mode 100644 index 00000000..aa7fa739 --- /dev/null +++ b/tests/utilities/test_assert_valid_name.py @@ -0,0 +1,28 @@ +from pytest import raises + +from graphql.error import GraphQLError +from graphql.utilities import assert_valid_name + + +def describe_assert_valid_name(): + + def throws_for_use_of_leading_double_underscore(): + with raises(GraphQLError) as exc_info: + assert assert_valid_name('__bad') + msg = exc_info.value.message + assert msg == ( + "Name '__bad' must not begin with '__'," + ' which is reserved by GraphQL introspection.') + + def throws_for_non_strings(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + assert_valid_name({}) + msg = str(exc_info.value) + assert msg == 'Expected string' + + def throws_for_names_with_invalid_characters(): + with raises(GraphQLError) as exc_info: + assert_valid_name('>--()-->') + msg = exc_info.value.message + assert 'Names must match' in msg diff --git a/tests/utilities/test_ast_from_value.py b/tests/utilities/test_ast_from_value.py new file mode 100644 index 00000000..fae4dd45 --- /dev/null +++ b/tests/utilities/test_ast_from_value.py @@ -0,0 +1,182 @@ +from math import nan + +from pytest import raises + +from graphql.error import INVALID +from graphql.language import ( + BooleanValueNode, EnumValueNode, FloatValueNode, + IntValueNode, ListValueNode, NameNode, NullValueNode, ObjectFieldNode, + ObjectValueNode, StringValueNode) +from graphql.type import 
( + GraphQLBoolean, GraphQLEnumType, GraphQLFloat, + GraphQLID, GraphQLInputField, GraphQLInputObjectType, GraphQLInt, + GraphQLList, GraphQLNonNull, GraphQLString) +from graphql.utilities import ast_from_value + + +def describe_ast_from_value(): + + def converts_boolean_values_to_asts(): + assert ast_from_value( + True, GraphQLBoolean) == BooleanValueNode(value=True) + + assert ast_from_value( + False, GraphQLBoolean) == BooleanValueNode(value=False) + + assert ast_from_value(INVALID, GraphQLBoolean) is None + + assert ast_from_value(nan, GraphQLInt) is None + + assert ast_from_value(None, GraphQLBoolean) == NullValueNode() + + assert ast_from_value( + 0, GraphQLBoolean) == BooleanValueNode(value=False) + + assert ast_from_value( + 1, GraphQLBoolean) == BooleanValueNode(value=True) + + non_null_boolean = GraphQLNonNull(GraphQLBoolean) + assert ast_from_value( + 0, non_null_boolean) == BooleanValueNode(value=False) + + def converts_int_values_to_int_asts(): + assert ast_from_value(-1, GraphQLInt) == IntValueNode(value='-1') + + assert ast_from_value(123.0, GraphQLInt) == IntValueNode(value='123') + + assert ast_from_value(1e4, GraphQLInt) == IntValueNode(value='10000') + + # GraphQL spec does not allow coercing non-integer values to Int to + # avoid accidental data loss. + with raises(TypeError) as exc_info: + assert ast_from_value(123.5, GraphQLInt) + msg = str(exc_info.value) + assert msg == 'Int cannot represent non-integer value: 123.5' + + # Note: outside the bounds of 32bit signed int. + with raises(TypeError) as exc_info: + assert ast_from_value(1e40, GraphQLInt) + msg = str(exc_info.value) + assert msg == ( + 'Int cannot represent non 32-bit signed integer value: 1e+40') + + def converts_float_values_to_float_asts(): + # luckily in Python we can discern between float and int + assert ast_from_value(-1, GraphQLFloat) == FloatValueNode(value='-1') + + assert ast_from_value( + 123.0, GraphQLFloat) == FloatValueNode(value='123') + + assert ast_from_value( + 123.5, GraphQLFloat) == FloatValueNode(value='123.5') + + assert ast_from_value( + 1e4, GraphQLFloat) == FloatValueNode(value='10000') + + assert ast_from_value( + 1e40, GraphQLFloat) == FloatValueNode(value='1e+40') + + def converts_string_values_to_string_asts(): + assert ast_from_value( + 'hello', GraphQLString) == StringValueNode(value='hello') + + assert ast_from_value( + 'VALUE', GraphQLString) == StringValueNode(value='VALUE') + + assert ast_from_value( + 'VA\nLUE', GraphQLString) == StringValueNode(value='VA\nLUE') + + assert ast_from_value( + 123, GraphQLString) == StringValueNode(value='123') + + assert ast_from_value( + False, GraphQLString) == StringValueNode(value='false') + + assert ast_from_value(None, GraphQLString) == NullValueNode() + + assert ast_from_value(INVALID, GraphQLString) is None + + def converts_id_values_to_int_or_string_asts(): + assert ast_from_value( + 'hello', GraphQLID) == StringValueNode(value='hello') + + assert ast_from_value( + 'VALUE', GraphQLID) == StringValueNode(value='VALUE') + + # Note: EnumValues cannot contain non-identifier characters + assert ast_from_value( + 'VA\nLUE', GraphQLID) == StringValueNode(value='VA\nLUE') + + # Note: IntValues are used when possible. 
+ assert ast_from_value(-1, GraphQLID) == IntValueNode(value='-1') + + assert ast_from_value(123, GraphQLID) == IntValueNode(value='123') + + assert ast_from_value('123', GraphQLID) == IntValueNode(value='123') + + assert ast_from_value('01', GraphQLID) == StringValueNode(value='01') + + with raises(TypeError) as exc_info: + assert ast_from_value(False, GraphQLID) + assert str(exc_info.value) == 'ID cannot represent value: False' + + assert ast_from_value(None, GraphQLID) == NullValueNode() + + assert ast_from_value(INVALID, GraphQLString) is None + + def does_not_convert_non_null_values_to_null_value(): + non_null_boolean = GraphQLNonNull(GraphQLBoolean) + assert ast_from_value(None, non_null_boolean) is None + + complex_value = {'someArbitrary': 'complexValue'} + + my_enum = GraphQLEnumType('MyEnum', { + 'HELLO': None, 'GOODBYE': None, 'COMPLEX': complex_value}) + + def converts_string_values_to_enum_asts_if_possible(): + assert ast_from_value('HELLO', my_enum) == EnumValueNode(value='HELLO') + + assert ast_from_value( + complex_value, my_enum) == EnumValueNode(value='COMPLEX') + + # Note: case sensitive + assert ast_from_value('hello', my_enum) is None + + # Note: not a valid enum value + assert ast_from_value('VALUE', my_enum) is None + + def converts_list_values_to_list_asts(): + assert ast_from_value( + ['FOO', 'BAR'], GraphQLList(GraphQLString) + ) == ListValueNode(values=[ + StringValueNode(value='FOO'), StringValueNode(value='BAR')]) + + assert ast_from_value( + ['HELLO', 'GOODBYE'], GraphQLList(my_enum) + ) == ListValueNode(values=[ + EnumValueNode(value='HELLO'), EnumValueNode(value='GOODBYE')]) + + def converts_list_singletons(): + assert ast_from_value( + 'FOO', GraphQLList(GraphQLString)) == StringValueNode(value='FOO') + + def converts_input_objects(): + input_obj = GraphQLInputObjectType('MyInputObj', { + 'foo': GraphQLInputField(GraphQLFloat), + 'bar': GraphQLInputField(my_enum)}) + + assert ast_from_value( + {'foo': 3, 'bar': 'HELLO'}, input_obj) == ObjectValueNode(fields=[ + ObjectFieldNode(name=NameNode(value='foo'), + value=FloatValueNode(value='3')), + ObjectFieldNode(name=NameNode(value='bar'), + value=EnumValueNode(value='HELLO'))]) + + def converts_input_objects_with_explicit_nulls(): + input_obj = GraphQLInputObjectType('MyInputObj', { + 'foo': GraphQLInputField(GraphQLFloat), + 'bar': GraphQLInputField(my_enum)}) + + assert ast_from_value({'foo': None}, input_obj) == ObjectValueNode( + fields=[ObjectFieldNode( + name=NameNode(value='foo'), value=NullValueNode())]) diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py new file mode 100644 index 00000000..8e0b1270 --- /dev/null +++ b/tests/utilities/test_build_ast_schema.py @@ -0,0 +1,936 @@ +from collections import namedtuple +from typing import cast + +from pytest import raises + +from graphql import graphql_sync +from graphql.language import parse, print_ast, DocumentNode +from graphql.type import ( + GraphQLDeprecatedDirective, GraphQLIncludeDirective, + GraphQLSkipDirective, GraphQLEnumType, GraphQLObjectType, + GraphQLInputObjectType, GraphQLInterfaceType, validate_schema) +from graphql.pyutils import dedent +from graphql.utilities import build_ast_schema, build_schema, print_schema + + +def cycle_output(body: str) -> str: + """Full cycle test. 
+ + This function does a full cycle of going from a string with the contents of + the DSL, parsed in a schema AST, materializing that schema AST into an in- + memory GraphQLSchema, and then finally printing that GraphQL into the DSL. + """ + ast = parse(body) + schema = build_ast_schema(ast) + return print_schema(schema) + + +def describe_schema_builder(): + + def can_use_built_schema_for_limited_execution(): + schema = build_ast_schema(parse(""" + type Query { + str: String + } + """)) + + data = namedtuple('Data', 'str')(123) + + result = graphql_sync(schema, '{ str }', data) + assert result == ({'str': '123'}, None) + + def can_build_a_schema_directly_from_the_source(): + schema = build_schema(""" + type Query { + add(x: Int, y: Int): Int + } + """) + + # noinspection PyMethodMayBeStatic + class Root: + def add(self, _info, x, y): + return x + y + + assert graphql_sync(schema, '{ add(x: 34, y: 55) }', Root()) == ( + {'add': 89}, None) + + def simple_type(): + body = dedent(""" + type Query { + str: String + int: Int + float: Float + id: ID + bool: Boolean + } + """) + output = cycle_output(body) + assert output == body + + def with_directives(): + body = dedent(""" + directive @foo(arg: Int) on FIELD + + type Query { + str: String + } + """) + output = cycle_output(body) + assert output == body + + def supports_descriptions(): + body = dedent(''' + """This is a directive""" + directive @foo( + """It has an argument""" + arg: Int + ) on FIELD + + """With an enum""" + enum Color { + RED + + """Not a creative color""" + GREEN + BLUE + } + + """What a great type""" + type Query { + """And a field to boot""" + str: String + } + ''') + output = cycle_output(body) + assert output == body + + def maintains_skip_and_include_directives(): + body = dedent(""" + type Query { + str: String + } + """) + schema = build_ast_schema(parse(body)) + assert len(schema.directives) == 3 + assert schema.get_directive('skip') is GraphQLSkipDirective + assert schema.get_directive('include') is GraphQLIncludeDirective + assert schema.get_directive('deprecated') is GraphQLDeprecatedDirective + + def overriding_directives_excludes_specified(): + body = dedent(""" + directive @skip on FIELD + directive @include on FIELD + directive @deprecated on FIELD_DEFINITION + + type Query { + str: String + } + """) + schema = build_ast_schema(parse(body)) + assert len(schema.directives) == 3 + get_directive = schema.get_directive + assert get_directive('skip') is not GraphQLSkipDirective + assert get_directive('skip') is not None + assert get_directive('include') is not GraphQLIncludeDirective + assert get_directive('include') is not None + assert get_directive('deprecated') is not GraphQLDeprecatedDirective + assert get_directive('deprecated') is not None + + def overriding_skip_directive_excludes_built_in_one(): + body = dedent(""" + directive @skip on FIELD + + type Query { + str: String + } + """) + schema = build_ast_schema(parse(body)) + assert len(schema.directives) == 3 + assert schema.get_directive('skip') is not GraphQLSkipDirective + assert schema.get_directive('skip') is not None + assert schema.get_directive('include') is GraphQLIncludeDirective + assert schema.get_directive('deprecated') is GraphQLDeprecatedDirective + + def adding_directives_maintains_skip_and_include_directives(): + body = dedent(""" + directive @foo(arg: Int) on FIELD + + type Query { + str: String + } + """) + schema = build_ast_schema(parse(body)) + assert len(schema.directives) == 4 + assert schema.get_directive('skip') is 
GraphQLSkipDirective + assert schema.get_directive('include') is GraphQLIncludeDirective + assert schema.get_directive('deprecated') is GraphQLDeprecatedDirective + assert schema.get_directive('foo') is not None + + def type_modifiers(): + body = dedent(""" + type Query { + nonNullStr: String! + listOfStrs: [String] + listOfNonNullStrs: [String!] + nonNullListOfStrs: [String]! + nonNullListOfNonNullStrs: [String!]! + } + """) + output = cycle_output(body) + assert output == body + + def recursive_type(): + body = dedent(""" + type Query { + str: String + recurse: Query + } + """) + output = cycle_output(body) + assert output == body + + def two_types_circular(): + body = dedent(""" + schema { + query: TypeOne + } + + type TypeOne { + str: String + typeTwo: TypeTwo + } + + type TypeTwo { + str: String + typeOne: TypeOne + } + """) + output = cycle_output(body) + assert output == body + + def single_argument_field(): + body = dedent(""" + type Query { + str(int: Int): String + floatToStr(float: Float): String + idToStr(id: ID): String + booleanToStr(bool: Boolean): String + strToStr(bool: String): String + } + """) + output = cycle_output(body) + assert output == body + + def simple_type_with_multiple_arguments(): + body = dedent(""" + type Query { + str(int: Int, bool: Boolean): String + } + """) + output = cycle_output(body) + assert output == body + + def simple_type_with_interface(): + body = dedent(""" + type Query implements WorldInterface { + str: String + } + + interface WorldInterface { + str: String + } + """) + output = cycle_output(body) + assert output == body + + def simple_output_enum(): + body = dedent(""" + enum Hello { + WORLD + } + + type Query { + hello: Hello + } + """) + output = cycle_output(body) + assert output == body + + def simple_input_enum(): + body = dedent(""" + enum Hello { + WORLD + } + + type Query { + str(hello: Hello): String + } + """) + output = cycle_output(body) + assert output == body + + def multiple_value_enum(): + body = dedent(""" + enum Hello { + WO + RLD + } + + type Query { + hello: Hello + } + """) + output = cycle_output(body) + assert output == body + + def simple_union(): + body = dedent(""" + union Hello = World + + type Query { + hello: Hello + } + + type World { + str: String + } + """) + output = cycle_output(body) + assert output == body + + def multiple_union(): + body = dedent(""" + union Hello = WorldOne | WorldTwo + + type Query { + hello: Hello + } + + type WorldOne { + str: String + } + + type WorldTwo { + str: String + } + """) + output = cycle_output(body) + assert output == body + + def can_build_recursive_union(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + build_schema(""" + union Hello = Hello + + type Query { + hello: Hello + } + """) + msg = str(exc_info.value) + assert msg == 'Hello types must be GraphQLObjectType objects.' + + def specifying_union_type_using_typename(): + schema = build_schema(""" + type Query { + fruits: [Fruit] + } + + union Fruit = Apple | Banana + + type Apple { + color: String + } + + type Banana { + length: Int + } + """) + + query = """ + { + fruits { + ... on Apple { + color + } + ... 
on Banana { + length + } + } + } + """ + + root = { + 'fruits': [{ + 'color': 'green', + '__typename': 'Apple' + }, { + 'length': 5, + '__typename': 'Banana' + }] + } + + assert graphql_sync(schema, query, root) == ({ + 'fruits': [{'color': 'green'}, {'length': 5}]}, None) + + def specifying_interface_type_using_typename(): + schema = build_schema(""" + type Query { + characters: [Character] + } + + interface Character { + name: String! + } + + type Human implements Character { + name: String! + totalCredits: Int + } + + type Droid implements Character { + name: String! + primaryFunction: String + } + """) + + query = """ + { + characters { + name + ... on Human { + totalCredits + } + ... on Droid { + primaryFunction + } + } + } + """ + + root = { + 'characters': [{ + 'name': 'Han Solo', + 'totalCredits': 10, + '__typename': 'Human' + }, { + 'name': 'R2-D2', + 'primaryFunction': 'Astromech', + '__typename': 'Droid' + }] + } + + assert graphql_sync(schema, query, root) == ({ + 'characters': [{ + 'name': 'Han Solo', + 'totalCredits': 10 + }, { + 'name': 'R2-D2', + 'primaryFunction': 'Astromech' + }] + }, None) + + def custom_scalar(): + body = dedent(""" + scalar CustomScalar + + type Query { + customScalar: CustomScalar + } + """) + output = cycle_output(body) + assert output == body + + def input_object(): + body = dedent(""" + input Input { + int: Int + } + + type Query { + field(in: Input): String + } + """) + output = cycle_output(body) + assert output == body + + def simple_argument_field_with_default(): + body = dedent(""" + type Query { + str(int: Int = 2): String + } + """) + output = cycle_output(body) + assert output == body + + def custom_scalar_argument_field_with_default(): + body = dedent(""" + scalar CustomScalar + + type Query { + str(int: CustomScalar = 2): String + } + """) + output = cycle_output(body) + assert output == body + + def simple_type_with_mutation(): + body = dedent(""" + schema { + query: HelloScalars + mutation: Mutation + } + + type HelloScalars { + str: String + int: Int + bool: Boolean + } + + type Mutation { + addHelloScalars(str: String, int: Int, bool: Boolean): HelloScalars + } + """) # noqa + output = cycle_output(body) + assert output == body + + def simple_type_with_subscription(): + body = dedent(""" + schema { + query: HelloScalars + subscription: Subscription + } + + type HelloScalars { + str: String + int: Int + bool: Boolean + } + + type Subscription { + subscribeHelloScalars(str: String, int: Int, bool: Boolean): HelloScalars + } + """) # noqa + output = cycle_output(body) + assert output == body + + def unreferenced_type_implementing_referenced_interface(): + body = dedent(""" + type Concrete implements Iface { + key: String + } + + interface Iface { + key: String + } + + type Query { + iface: Iface + } + """) + output = cycle_output(body) + assert output == body + + def unreferenced_type_implementing_referenced_union(): + body = dedent(""" + type Concrete { + key: String + } + + type Query { + union: Union + } + + union Union = Concrete + """) + output = cycle_output(body) + assert output == body + + def supports_deprecated_directive(): + body = dedent(""" + enum MyEnum { + VALUE + OLD_VALUE @deprecated + OTHER_VALUE @deprecated(reason: "Terrible reasons") + } + + type Query { + field1: String @deprecated + field2: Int @deprecated(reason: "Because I said so") + enum: MyEnum + } + """) + output = cycle_output(body) + assert output == body + + ast = parse(body) + schema = build_ast_schema(ast) + + my_enum = schema.get_type('MyEnum') + my_enum 
= cast(GraphQLEnumType, my_enum) + + value = my_enum.values['VALUE'] + assert value.is_deprecated is False + + old_value = my_enum.values['OLD_VALUE'] + assert old_value.is_deprecated is True + assert old_value.deprecation_reason == 'No longer supported' + + other_value = my_enum.values['OTHER_VALUE'] + assert other_value.is_deprecated is True + assert other_value.deprecation_reason == 'Terrible reasons' + + root_fields = schema.get_type('Query').fields + field1 = root_fields['field1'] + assert field1.is_deprecated is True + assert field1.deprecation_reason == 'No longer supported' + field2 = root_fields['field2'] + assert field2.is_deprecated is True + assert field2.deprecation_reason == 'Because I said so' + + def correctly_assign_ast_nodes(): + schema_ast = parse(dedent(""" + schema { + query: Query + } + + type Query + { + testField(testArg: TestInput): TestUnion + } + + input TestInput + { + testInputField: TestEnum + } + + enum TestEnum + { + TEST_VALUE + } + + union TestUnion = TestType + + interface TestInterface + { + interfaceField: String + } + + type TestType implements TestInterface + { + interfaceField: String + } + + scalar TestScalar + + directive @test(arg: TestScalar) on FIELD + """)) + schema = build_ast_schema(schema_ast) + query = schema.get_type('Query') + query = cast(GraphQLObjectType, query) + test_input = schema.get_type('TestInput') + test_input = cast(GraphQLInputObjectType, test_input) + test_enum = schema.get_type('TestEnum') + test_enum = cast(GraphQLEnumType, test_enum) + test_union = schema.get_type('TestUnion') + test_interface = schema.get_type('TestInterface') + test_interface = cast(GraphQLInterfaceType, test_interface) + test_type = schema.get_type('TestType') + test_scalar = schema.get_type('TestScalar') + test_directive = schema.get_directive('test') + + restored_schema_ast = DocumentNode(definitions=[ + schema.ast_node, + query.ast_node, + test_input.ast_node, + test_enum.ast_node, + test_union.ast_node, + test_interface.ast_node, + test_type.ast_node, + test_scalar.ast_node, + test_directive.ast_node + ]) + assert print_ast(restored_schema_ast) == print_ast(schema_ast) + + test_field = query.fields['testField'] + assert print_ast(test_field.ast_node) == ( + 'testField(testArg: TestInput): TestUnion') + assert print_ast(test_field.args['testArg'].ast_node) == ( + 'testArg: TestInput') + assert print_ast(test_input.fields['testInputField'].ast_node) == ( + 'testInputField: TestEnum') + assert print_ast(test_enum.values['TEST_VALUE'].ast_node) == ( + 'TEST_VALUE') + assert print_ast(test_interface.fields['interfaceField'].ast_node) == ( + 'interfaceField: String') + assert print_ast(test_directive.args['arg'].ast_node) == ( + 'arg: TestScalar') + + def root_operation_type_with_custom_names(): + schema = build_schema(dedent(""" + schema { + query: SomeQuery + mutation: SomeMutation + subscription: SomeSubscription + } + type SomeQuery { str: String } + type SomeMutation { str: String } + type SomeSubscription { str: String } + """)) + + assert schema.query_type.name == 'SomeQuery' + assert schema.mutation_type.name == 'SomeMutation' + assert schema.subscription_type.name == 'SomeSubscription' + + def default_root_operation_type_names(): + schema = build_schema(dedent(""" + type Query { str: String } + type Mutation { str: String } + type Subscription { str: String } + """)) + + assert schema.query_type.name == 'Query' + assert schema.mutation_type.name == 'Mutation' + assert schema.subscription_type.name == 'Subscription' + + def 
can_build_invalid_schema(): + schema = build_schema(dedent(""" + # Invalid schema, because it is missing query root type + type Mutation { + str: String + } + """)) + errors = validate_schema(schema) + assert errors + + +def describe_failures(): + + def allows_only_a_single_schema_definition(): + body = dedent(""" + schema { + query: Hello + } + + schema { + query: Hello + } + + type Hello { + bar: Bar + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == 'Must provide only one schema definition.' + + def allows_only_a_single_query_type(): + body = dedent(""" + schema { + query: Hello + query: Yellow + } + + type Hello { + bar: Bar + } + + type Yellow { + isColor: Boolean + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == 'Must provide only one query type in schema.' + + def allows_only_a_single_mutation_type(): + body = dedent(""" + schema { + query: Hello + mutation: Hello + mutation: Yellow + } + + type Hello { + bar: Bar + } + + type Yellow { + isColor: Boolean + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == 'Must provide only one mutation type in schema.' + + def allows_only_a_single_subscription_type(): + body = dedent(""" + schema { + query: Hello + subscription: Hello + subscription: Yellow + } + type Hello { + bar: Bar + } + + type Yellow { + isColor: Boolean + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == 'Must provide only one subscription type in schema.' + + def unknown_type_referenced(): + body = dedent(""" + schema { + query: Hello + } + + type Hello { + bar: Bar + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert "Type 'Bar' not found in document." in msg + + def unknown_type_in_interface_list(): + body = dedent(""" + type Query implements Bar { + field: String + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert "Type 'Bar' not found in document." in msg + + def unknown_type_in_union_list(): + body = dedent(""" + union TestUnion = Bar + type Query { testUnion: TestUnion } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert "Type 'Bar' not found in document." in msg + + def unknown_query_type(): + body = dedent(""" + schema { + query: Wat + } + + type Hello { + str: String + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == "Specified query type 'Wat' not found in document." + + def unknown_mutation_type(): + body = dedent(""" + schema { + query: Hello + mutation: Wat + } + + type Hello { + str: String + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == "Specified mutation type 'Wat' not found in document." 
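The failure cases in this block and the buildable-but-invalid case above illustrate the two ways a bad SDL document can surface: build_ast_schema / build_schema raise a TypeError when the document cannot even be materialized, while documents that build but violate the spec are only reported later by validate_schema. A short sketch of both modes, reusing inputs from the cases above:

    from pytest import raises

    from graphql.type import validate_schema
    from graphql.utilities import build_schema

    # 1. SDL that cannot be materialized raises a TypeError at build time.
    with raises(TypeError):
        build_schema("""
            union Hello = Hello

            type Query {
                hello: Hello
            }
            """)

    # 2. SDL that builds, but violates the spec, is reported by validate_schema.
    schema = build_schema("""
        type Mutation {
            str: String
        }
        """)
    assert validate_schema(schema)  # non-empty: the query root type is missing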
+ + def unknown_subscription_type(): + body = dedent(""" + schema { + query: Hello + mutation: Wat + subscription: Awesome + } + + type Hello { + str: String + } + + type Wat { + str: String + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == ( + "Specified subscription type 'Awesome' not found in document.") + + def does_not_consider_operation_names(): + body = dedent(""" + schema { + query: Foo + } + + type Hello { + str: String + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == "Specified query type 'Foo' not found in document." + + def does_not_consider_fragment_names(): + body = dedent(""" + schema { + query: Foo + } + + fragment Foo on Type { field } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == "Specified query type 'Foo' not found in document." + + def forbids_duplicate_type_definitions(): + body = dedent(""" + schema { + query: Repeated + } + + type Repeated { + id: Int + } + + type Repeated { + id: String + } + """) + doc = parse(body) + with raises(TypeError) as exc_info: + build_ast_schema(doc) + msg = str(exc_info.value) + assert msg == "Type 'Repeated' was defined more than once." diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py new file mode 100644 index 00000000..f62186e3 --- /dev/null +++ b/tests/utilities/test_build_client_schema.py @@ -0,0 +1,435 @@ +from pytest import raises + +from graphql import graphql_sync +from graphql.language import DirectiveLocation +from graphql.type import ( + GraphQLArgument, GraphQLBoolean, GraphQLDirective, GraphQLEnumType, + GraphQLEnumValue, GraphQLField, GraphQLFloat, GraphQLID, + GraphQLInputField, GraphQLInputObjectType, GraphQLInt, + GraphQLInterfaceType, GraphQLList, GraphQLNonNull, GraphQLObjectType, + GraphQLScalarType, GraphQLSchema, GraphQLString, GraphQLUnionType) +from graphql.utilities import build_client_schema, introspection_from_schema + + +def check_schema(server_schema): + """Test that the client side introspection gives the same result. + + Given a server's schema, a client may query that server with introspection, + and use the result to produce a client-side representation of the schema + by using "build_client_schema". If the client then runs the introspection + query against the client-side schema, it should get a result identical to + what was returned by the server. 
+ """ + initial_introspection = introspection_from_schema(server_schema) + client_schema = build_client_schema(initial_introspection) + second_introspection = introspection_from_schema(client_schema) + assert initial_introspection == second_introspection + + +def describe_type_system_build_schema_from_introspection(): + + def builds_a_simple_schema(): + schema = GraphQLSchema(GraphQLObjectType('Simple', { + 'string': GraphQLField( + GraphQLString, description='This is a string field')}, + description='This is a simple type')) + check_schema(schema) + + def builds_a_simple_schema_with_all_operation_types(): + query_type = GraphQLObjectType('QueryType', { + 'string': GraphQLField( + GraphQLString, description='This is a string field.')}, + description='This is a simple query type') + + mutation_type = GraphQLObjectType('MutationType', { + 'setString': GraphQLField( + GraphQLString, description='Set the string field', args={ + 'value': GraphQLArgument(GraphQLString)})}, + description='This is a simple mutation type') + + subscription_type = GraphQLObjectType('SubscriptionType', { + 'string': GraphQLField( + GraphQLString, description='This is a string field')}, + description='This is a simple subscription type') + + schema = GraphQLSchema(query_type, mutation_type, subscription_type) + check_schema(schema) + + def uses_built_in_scalars_when_possible(): + custom_scalar = GraphQLScalarType( + 'CustomScalar', serialize=lambda: None) + + schema = GraphQLSchema(GraphQLObjectType('Scalars', { + 'int': GraphQLField(GraphQLInt), + 'float': GraphQLField(GraphQLFloat), + 'string': GraphQLField(GraphQLString), + 'boolean': GraphQLField(GraphQLBoolean), + 'id': GraphQLField(GraphQLID), + 'custom': GraphQLField(custom_scalar)})) + + check_schema(schema) + + introspection = introspection_from_schema(schema) + client_schema = build_client_schema(introspection) + + # Built-ins are used + assert client_schema.get_type('Int') is GraphQLInt + assert client_schema.get_type('Float') is GraphQLFloat + assert client_schema.get_type('String') is GraphQLString + assert client_schema.get_type('Boolean') is GraphQLBoolean + assert client_schema.get_type('ID') is GraphQLID + + # Custom are built + assert client_schema.get_type('CustomScalar') is not custom_scalar + + def builds_a_schema_with_a_recursive_type_reference(): + recur_type = GraphQLObjectType( + 'Recur', lambda: {'recur': GraphQLField(recur_type)}) + schema = GraphQLSchema(recur_type) + + check_schema(schema) + + def builds_a_schema_with_a_circular_type_reference(): + dog_type = GraphQLObjectType( + 'Dog', lambda: {'bestFriend': GraphQLField(human_type)}) + human_type = GraphQLObjectType( + 'Human', lambda: {'bestFriend': GraphQLField(dog_type)}) + schema = GraphQLSchema(GraphQLObjectType('Circular', { + 'dog': GraphQLField(dog_type), + 'human': GraphQLField(human_type)})) + + check_schema(schema) + + def builds_a_schema_with_an_interface(): + friendly_type = GraphQLInterfaceType('Friendly', lambda: { + 'bestFriend': GraphQLField( + friendly_type, + description='The best friend of this friendly thing.')}) + dog_type = GraphQLObjectType('DogType', lambda: { + 'bestFriend': GraphQLField(friendly_type)}, interfaces=[ + friendly_type]) + human_type = GraphQLObjectType('Human', lambda: { + 'bestFriend': GraphQLField(friendly_type)}, interfaces=[ + friendly_type]) + schema = GraphQLSchema( + GraphQLObjectType('WithInterface', { + 'friendly': GraphQLField(friendly_type)}), + types=[dog_type, human_type]) + + check_schema(schema) + + def 
builds_a_schema_with_an_implicit_interface(): + friendly_type = GraphQLInterfaceType('Friendly', lambda: { + 'bestFriend': GraphQLField( + friendly_type, + description='The best friend of this friendly thing.')}) + dog_type = GraphQLObjectType('DogType', lambda: { + 'bestFriend': GraphQLField(dog_type)}, interfaces=[friendly_type]) + schema = GraphQLSchema(GraphQLObjectType('WithInterface', { + 'dog': GraphQLField(dog_type)})) + + check_schema(schema) + + def builds_a_schema_with_a_union(): + dog_type = GraphQLObjectType( + 'Dog', lambda: {'bestFriend': GraphQLField(friendly_type)}) + human_type = GraphQLObjectType( + 'Human', lambda: {'bestFriend': GraphQLField(friendly_type)}) + friendly_type = GraphQLUnionType( + 'Friendly', types=[dog_type, human_type]) + schema = GraphQLSchema(GraphQLObjectType('WithUnion', { + 'friendly': GraphQLField(friendly_type)})) + + check_schema(schema) + + def builds_a_schema_with_complex_field_values(): + schema = GraphQLSchema(GraphQLObjectType('ComplexFields', { + 'string': GraphQLField(GraphQLString), + 'listOfString': GraphQLField(GraphQLList(GraphQLString)), + 'nonNullString': GraphQLField(GraphQLNonNull(GraphQLString)), + 'nonNullListOfString': GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLString))), + 'nonNullListOfNonNullString': GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLString))))})) + + check_schema(schema) + + def builds_a_schema_with_field_arguments(): + schema = GraphQLSchema(GraphQLObjectType('ArgFields', { + 'one': GraphQLField( + GraphQLString, description='A field with a single arg', args={ + 'intArg': GraphQLArgument( + GraphQLInt, description='This is an int arg')}), + 'two': GraphQLField( + GraphQLString, description='A field with two args', args={ + 'listArg': GraphQLArgument( + GraphQLList(GraphQLInt), + description='This is a list of int arg'), + 'requiredArg': GraphQLArgument( + GraphQLNonNull(GraphQLBoolean), + description='This is a required arg')})})) + + check_schema(schema) + + def builds_a_schema_with_default_value_on_custom_scalar_field(): + schema = GraphQLSchema(GraphQLObjectType('ArgFields', { + 'testField': GraphQLField(GraphQLString, args={ + 'testArg': GraphQLArgument(GraphQLScalarType( + 'CustomScalar', serialize=lambda value: value), + default_value='default')})})) + + check_schema(schema) + + def builds_a_schema_with_an_enum(): + food_enum = GraphQLEnumType('Food', { + 'VEGETABLES': GraphQLEnumValue( + 1, description='Foods that are vegetables.'), + 'FRUITS': GraphQLEnumValue( + 2, description='Foods that are fruits.'), + 'OILS': GraphQLEnumValue( + 3, description='Foods that are oils.'), + 'DAIRY': GraphQLEnumValue( + 4, description='Foods that are dairy.'), + 'MEAT': GraphQLEnumValue( + 5, description='Foods that are meat.')}, + description='Varieties of food stuffs') + + schema = GraphQLSchema(GraphQLObjectType('EnumFields', { + 'food': GraphQLField(food_enum, args={ + 'kind': GraphQLArgument( + food_enum, description='what kind of food?')}, + description='Repeats the arg you give it')})) + + check_schema(schema) + + introspection = introspection_from_schema(schema) + client_schema = build_client_schema(introspection) + client_food_enum = client_schema.get_type('Food') + + # It's also an Enum type on the client. 
+ assert isinstance(client_food_enum, GraphQLEnumType) + + values = client_food_enum.values + descriptions = { + name: value.description for name, value in values.items()} + assert descriptions == { + 'VEGETABLES': 'Foods that are vegetables.', + 'FRUITS': 'Foods that are fruits.', + 'OILS': 'Foods that are oils.', + 'DAIRY': 'Foods that are dairy.', + 'MEAT': 'Foods that are meat.'} + values = values.values() + assert all(value.value is None for value in values) + assert all(value.is_deprecated is False for value in values) + assert all(value.deprecation_reason is None for value in values) + assert all(value.ast_node is None for value in values) + + def builds_a_schema_with_an_input_object(): + address_type = GraphQLInputObjectType('Address', { + 'street': GraphQLInputField( + GraphQLNonNull(GraphQLString), + description='What street is this address?'), + 'city': GraphQLInputField( + GraphQLNonNull(GraphQLString), + description='The city the address is within?'), + 'country': GraphQLInputField( + GraphQLString, default_value='USA', + description='The country (blank will assume USA).')}, + description='An input address') + + schema = GraphQLSchema(GraphQLObjectType('HasInputObjectFields', { + 'geocode': GraphQLField(GraphQLString, args={ + 'address': GraphQLArgument( + address_type, description='The address to lookup')}, + description='Get a geocode from an address')})) + + check_schema(schema) + + def builds_a_schema_with_field_arguments_with_default_values(): + geo_type = GraphQLInputObjectType('Geo', { + 'lat': GraphQLInputField(GraphQLFloat), + 'lon': GraphQLInputField(GraphQLFloat)}) + + schema = GraphQLSchema(GraphQLObjectType('ArgFields', { + 'defaultInt': GraphQLField(GraphQLString, args={ + 'intArg': GraphQLArgument(GraphQLInt, default_value=10)}), + 'defaultList': GraphQLField(GraphQLString, args={ + 'listArg': GraphQLArgument( + GraphQLList(GraphQLInt), default_value=[1, 2, 3])}), + 'defaultObject': GraphQLField(GraphQLString, args={ + 'objArg': GraphQLArgument( + geo_type, + default_value={'lat': 37.485, 'lon': -122.148})}), + 'defaultNull': GraphQLField(GraphQLString, args={ + 'intArg': GraphQLArgument(GraphQLInt, default_value=None)}), + 'noDefaults': GraphQLField(GraphQLString, args={ + 'intArg': GraphQLArgument(GraphQLInt)})})) + + check_schema(schema) + + def builds_a_schema_with_custom_directives(): + schema = GraphQLSchema( + GraphQLObjectType('Simple', { + 'string': GraphQLField( + GraphQLString, description='This is a string field')}, + description='This is a simple type'), + directives=[GraphQLDirective( + 'customDirective', [DirectiveLocation.FIELD], + description='This is a custom directive')]) + + check_schema(schema) + + def builds_a_schema_aware_of_deprecation(): + schema = GraphQLSchema(GraphQLObjectType('Simple', { + 'shinyString': GraphQLField( + GraphQLString, description='This is a shiny string field'), + 'deprecatedString': GraphQLField( + GraphQLString, description='This is a deprecated string field', + deprecation_reason='Use shinyString'), + 'color': GraphQLField( + GraphQLEnumType('Color', { + 'RED': GraphQLEnumValue(description='So rosy'), + 'GREEN': GraphQLEnumValue(description='So grassy'), + 'BLUE': GraphQLEnumValue(description='So calming'), + 'MAUVE': GraphQLEnumValue( + description='So sickening', + deprecation_reason='No longer in fashion')}))}, + description='This is a simple type')) + + check_schema(schema) + + def can_use_client_schema_for_limited_execution(): + custom_scalar = GraphQLScalarType( + 'CustomScalar', serialize=lambda: None) + + 
schema = GraphQLSchema(GraphQLObjectType('Query', { + 'foo': GraphQLField(GraphQLString, args={ + 'custom1': GraphQLArgument(custom_scalar), + 'custom2': GraphQLArgument(custom_scalar)})})) + + introspection = introspection_from_schema(schema) + client_schema = build_client_schema(introspection) + + class Data: + foo = 'bar' + unused = 'value' + + result = graphql_sync( + client_schema, + 'query Limited($v: CustomScalar) {' + ' foo(custom1: 123, custom2: $v) }', + Data(), variable_values={'v': 'baz'}) + + assert result.data == {'foo': 'bar'} + + +def describe_throws_when_given_incomplete_introspection(): + + def throws_when_given_empty_types(): + incomplete_introspection = { + '__schema': { + 'queryType': {'name': 'QueryType'}, + 'types': [] + } + } + + with raises(TypeError) as exc_info: + build_client_schema(incomplete_introspection) + + assert str(exc_info.value) == ( + 'Invalid or incomplete schema, unknown type: QueryType.' + ' Ensure that a full introspection query is used' + ' in order to build a client schema.') + + def throws_when_missing_kind(): + incomplete_introspection = { + '__schema': { + 'queryType': {'name': 'QueryType'}, + 'types': [{ + 'name': 'QueryType' + }] + } + } + + with raises(TypeError) as exc_info: + build_client_schema(incomplete_introspection) + + assert str(exc_info.value) == ( + 'Invalid or incomplete introspection result.' + ' Ensure that a full introspection query is used' + " in order to build a client schema: {'name': 'QueryType'}") + + def throws_when_missing_interfaces(): + null_interface_introspection = { + '__schema': { + 'queryType': {'name': 'QueryType'}, + 'types': [{ + 'kind': 'OBJECT', + 'name': 'QueryType', + 'fields': [{ + 'name': 'aString', + 'args': [], + 'type': { + 'kind': 'SCALAR', 'name': 'String', + 'ofType': None}, + 'isDeprecated': False + }] + }] + } + } + + with raises(TypeError) as exc_info: + build_client_schema(null_interface_introspection) + + assert str(exc_info.value) == ( + 'Introspection result missing interfaces:' + " {'kind': 'OBJECT', 'name': 'QueryType'," + " 'fields': [{'name': 'aString', 'args': []," + " 'type': {'kind': 'SCALAR', 'name': 'String', 'ofType': None}," + " 'isDeprecated': False}]}") + + +def describe_very_deep_decorators_are_not_supported(): + + def fails_on_very_deep_lists_more_than_7_levels(): + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'foo': GraphQLField( + GraphQLList(GraphQLList(GraphQLList(GraphQLList( + GraphQLList(GraphQLList(GraphQLList(GraphQLList( + GraphQLString)))))))))})) + + introspection = introspection_from_schema(schema) + + with raises(TypeError) as exc_info: + build_client_schema(introspection) + + assert str(exc_info.value) == ( + 'Query fields cannot be resolved:' + ' Decorated type deeper than introspection query.') + + def fails_on_a_very_deep_non_null_more_than_7_levels(): + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'foo': GraphQLField( + GraphQLList(GraphQLNonNull(GraphQLList(GraphQLNonNull( + GraphQLList(GraphQLNonNull(GraphQLList(GraphQLNonNull( + GraphQLString)))))))))})) + + introspection = introspection_from_schema(schema) + + with raises(TypeError) as exc_info: + build_client_schema(introspection) + + assert str(exc_info.value) == ( + 'Query fields cannot be resolved:' + ' Decorated type deeper than introspection query.') + + def succeeds_on_deep_types_less_or_equal_7_levels(): + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'foo': GraphQLField( + # e.g., fully non-null 3D matrix + GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLList( + 
GraphQLNonNull(GraphQLList(GraphQLNonNull(
+                        GraphQLString))))))))}))
+
+        introspection = introspection_from_schema(schema)
+        build_client_schema(introspection)
diff --git a/tests/utilities/test_coerce_value.py b/tests/utilities/test_coerce_value.py
new file mode 100644
index 00000000..ddab6d4b
--- /dev/null
+++ b/tests/utilities/test_coerce_value.py
@@ -0,0 +1,231 @@
+from math import inf, nan
+from typing import Any, List
+
+from graphql.error import INVALID
+from graphql.type import (
+    GraphQLEnumType, GraphQLFloat, GraphQLID, GraphQLInputField,
+    GraphQLInputObjectType, GraphQLInt, GraphQLNonNull, GraphQLString)
+from graphql.utilities import coerce_value
+from graphql.utilities.coerce_value import CoercedValue
+
+
+def expect_value(result: CoercedValue) -> Any:
+    assert result.errors is None
+    return result.value
+
+
+def expect_error(result: CoercedValue) -> List[str]:
+    errors = result.errors
+    messages = errors and [error.message for error in errors]
+    assert result.value is INVALID
+    return messages
+
+
+def describe_coerce_value():
+
+    def describe_for_graphql_string():
+
+        def returns_error_for_array_input_as_string():
+            result = coerce_value([1, 2, 3], GraphQLString)
+            assert expect_error(result) == [
+                f'Expected type String;'
+                ' String cannot represent a non string value: [1, 2, 3]']
+
+    def describe_for_graphql_id():
+
+        def returns_error_for_array_input_as_string():
+            result = coerce_value([1, 2, 3], GraphQLID)
+            assert expect_error(result) == [
+                f'Expected type ID;'
+                ' ID cannot represent value: [1, 2, 3]']
+
+    def describe_for_graphql_int():
+
+        def returns_value_for_integer():
+            result = coerce_value(1, GraphQLInt)
+            assert expect_value(result) == 1
+
+        def returns_error_for_numeric_looking_string():
+            result = coerce_value('1', GraphQLInt)
+            assert expect_error(result) == [
+                f'Expected type Int;'
+                " Int cannot represent non-integer value: '1'"]
+
+        def returns_value_for_negative_int_input():
+            result = coerce_value(-1, GraphQLInt)
+            assert expect_value(result) == -1
+
+        def returns_value_for_exponent_input():
+            result = coerce_value(1e3, GraphQLInt)
+            assert expect_value(result) == 1000
+
+        def returns_null_for_null_value():
+            result = coerce_value(None, GraphQLInt)
+            assert expect_value(result) is None
+
+        def returns_a_single_error_for_empty_string_as_value():
+            result = coerce_value('', GraphQLInt)
+            assert expect_error(result) == [
+                'Expected type Int; Int cannot represent'
+                " non-integer value: ''"]
+
+        def returns_a_single_error_for_2_32_input_as_int():
+            result = coerce_value(1 << 32, GraphQLInt)
+            assert expect_error(result) == [
+                'Expected type Int; Int cannot represent'
+                ' non 32-bit signed integer value: 4294967296']
+
+        def returns_a_single_error_for_float_input_as_int():
+            result = coerce_value(1.5, GraphQLInt)
+            assert expect_error(result) == [
+                'Expected type Int;'
+                " Int cannot represent non-integer value: 1.5"]
+
+        def returns_a_single_error_for_nan_input_as_int():
+            result = coerce_value(nan, GraphQLInt)
+            assert expect_error(result) == [
+                'Expected type Int;'
+                ' Int cannot represent non-integer value: nan']
+
+        def returns_a_single_error_for_infinity_input_as_int():
+            result = coerce_value(inf, GraphQLInt)
+            assert expect_error(result) == [
+                'Expected type Int;'
+                ' Int cannot represent non-integer value: inf']
+
+        def returns_a_single_error_for_char_input():
+            result = coerce_value('a', GraphQLInt)
+            assert expect_error(result) == [
+                'Expected type Int;'
+                " Int cannot represent non-integer value: 'a'"]
+
+        def 
returns_a_single_error_for_string_input(): + result = coerce_value('meow', GraphQLInt) + assert expect_error(result) == [ + 'Expected type Int;' + " Int cannot represent non-integer value: 'meow'"] + + def describe_for_graphql_float(): + + def returns_value_for_integer(): + result = coerce_value(1, GraphQLFloat) + assert expect_value(result) == 1 + + def returns_value_for_decimal(): + result = coerce_value(1.1, GraphQLFloat) + assert expect_value(result) == 1.1 + + def returns_no_error_for_exponent_input(): + result = coerce_value(1e3, GraphQLFloat) + assert expect_value(result) == 1000 + + def returns_error_for_numeric_looking_string(): + result = coerce_value('1', GraphQLFloat) + assert expect_error(result) == [ + 'Expected type Float;' + " Float cannot represent non numeric value: '1'"] + + def returns_null_for_null_value(): + result = coerce_value(None, GraphQLFloat) + assert expect_value(result) is None + + def returns_a_single_error_for_empty_string_input(): + result = coerce_value('', GraphQLFloat) + assert expect_error(result) == [ + 'Expected type Float;' + " Float cannot represent non numeric value: ''"] + + def returns_a_single_error_for_nan_input(): + result = coerce_value(nan, GraphQLFloat) + assert expect_error(result) == [ + 'Expected type Float;' + ' Float cannot represent non numeric value: nan'] + + def returns_a_single_error_for_infinity_input(): + result = coerce_value(inf, GraphQLFloat) + assert expect_error(result) == [ + 'Expected type Float;' + ' Float cannot represent non numeric value: inf'] + + def returns_a_single_error_for_char_input(): + result = coerce_value('a', GraphQLFloat) + assert expect_error(result) == [ + 'Expected type Float;' + " Float cannot represent non numeric value: 'a'"] + + def returns_a_single_error_for_string_input(): + result = coerce_value('meow', GraphQLFloat) + assert expect_error(result) == [ + 'Expected type Float;' + " Float cannot represent non numeric value: 'meow'"] + + def describe_for_graphql_enum(): + TestEnum = GraphQLEnumType('TestEnum', { + 'FOO': 'InternalFoo', 'BAR': 123456789}) + + def returns_no_error_for_a_known_enum_name(): + foo_result = coerce_value('FOO', TestEnum) + assert expect_value(foo_result) == 'InternalFoo' + + bar_result = coerce_value('BAR', TestEnum) + assert expect_value(bar_result) == 123456789 + + def results_error_for_misspelled_enum_value(): + result = coerce_value('foo', TestEnum) + assert expect_error(result) == [ + 'Expected type TestEnum; did you mean FOO?'] + + def results_error_for_incorrect_value_type(): + result1 = coerce_value(123, TestEnum) + assert expect_error(result1) == ['Expected type TestEnum.'] + + result2 = coerce_value({'field': 'value'}, TestEnum) + assert expect_error(result2) == ['Expected type TestEnum.'] + + def describe_for_graphql_input_object(): + TestInputObject = GraphQLInputObjectType('TestInputObject', { + 'foo': GraphQLInputField(GraphQLNonNull(GraphQLInt)), + 'bar': GraphQLInputField(GraphQLInt)}) + + def returns_no_error_for_a_valid_input(): + result = coerce_value({'foo': 123}, TestInputObject) + assert expect_value(result) == {'foo': 123} + + def returns_error_for_a_non_dict_value(): + result = coerce_value(123, TestInputObject) + assert expect_error(result) == [ + 'Expected type TestInputObject to be a dict.'] + + def returns_error_for_an_invalid_field(): + result = coerce_value({'foo': 'abc'}, TestInputObject) + assert expect_error(result) == [ + 'Expected type Int at value.foo;' + " Int cannot represent non-integer value: 'abc'"] + + def 
returns_multiple_errors_for_multiple_invalid_fields():
+            result = coerce_value(
+                {'foo': 'abc', 'bar': 'def'}, TestInputObject)
+            assert expect_error(result) == [
+                'Expected type Int at value.foo;'
+                " Int cannot represent non-integer value: 'abc'",
+                'Expected type Int at value.bar;'
+                " Int cannot represent non-integer value: 'def'"]
+
+        def returns_error_for_a_missing_required_field():
+            result = coerce_value({'bar': 123}, TestInputObject)
+            assert expect_error(result) == [
+                'Field value.foo'
+                ' of required type Int! was not provided.']
+
+        def returns_error_for_an_unknown_field():
+            result = coerce_value(
+                {'foo': 123, 'unknownField': 123}, TestInputObject)
+            assert expect_error(result) == [
+                "Field 'unknownField' is not defined"
+                ' by type TestInputObject.']
+
+        def returns_error_for_a_misspelled_field():
+            result = coerce_value({'foo': 123, 'bart': 123}, TestInputObject)
+            assert expect_error(result) == [
+                "Field 'bart' is not defined"
+                ' by type TestInputObject; did you mean bar?']
diff --git a/tests/utilities/test_concat_ast.py b/tests/utilities/test_concat_ast.py
new file mode 100644
index 00000000..2e200c5e
--- /dev/null
+++ b/tests/utilities/test_concat_ast.py
@@ -0,0 +1,33 @@
+from graphql.language import parse, print_ast, Source
+from graphql.pyutils import dedent
+from graphql.utilities import concat_ast
+
+
+def describe_concat_ast():
+
+    def concats_two_asts_together():
+        source_a = Source("""
+            { a, b, ... Frag }
+            """)
+
+        source_b = Source("""
+            fragment Frag on T {
+              c
+            }
+            """)
+
+        ast_a = parse(source_a)
+        ast_b = parse(source_b)
+        ast_c = concat_ast([ast_a, ast_b])
+
+        assert print_ast(ast_c) == dedent("""
+            {
+              a
+              b
+              ...Frag
+            }
+
+            fragment Frag on T {
+              c
+            }
+            """)
diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py
new file mode 100644
index 00000000..223943a1
--- /dev/null
+++ b/tests/utilities/test_extend_schema.py
@@ -0,0 +1,1241 @@
+from pytest import raises
+
+from graphql import graphql_sync
+from graphql.error import GraphQLError
+from graphql.language import (
+    parse, print_ast, DirectiveLocation, DocumentNode)
+from graphql.pyutils import dedent
+from graphql.type import (
+    GraphQLArgument, GraphQLDirective, GraphQLEnumType, GraphQLEnumValue,
+    GraphQLField, GraphQLID, GraphQLInputField, GraphQLInputObjectType,
+    GraphQLInterfaceType, GraphQLList, GraphQLNonNull, GraphQLObjectType,
+    GraphQLScalarType, GraphQLSchema, GraphQLString, GraphQLUnionType,
+    is_non_null_type, is_scalar_type, specified_directives, validate_schema)
+from graphql.utilities import extend_schema, print_schema
+
+# Test schema.
+
+SomeScalarType = GraphQLScalarType(
+    name='SomeScalar',
+    serialize=lambda x: x)
+
+SomeInterfaceType = GraphQLInterfaceType(
+    name='SomeInterface',
+    fields=lambda: {
+        'name': GraphQLField(GraphQLString),
+        'some': GraphQLField(SomeInterfaceType)})
+
+FooType = GraphQLObjectType(
+    name='Foo',
+    interfaces=[SomeInterfaceType],
+    fields=lambda: {
+        'name': GraphQLField(GraphQLString),
+        'some': GraphQLField(SomeInterfaceType),
+        'tree': GraphQLField(GraphQLNonNull(GraphQLList(FooType)))})
+
+BarType = GraphQLObjectType(
+    name='Bar',
+    interfaces=[SomeInterfaceType],
+    fields=lambda: {
+        'name': GraphQLField(GraphQLString),
+        'some': GraphQLField(SomeInterfaceType),
+        'foo': GraphQLField(FooType)})
+
+BizType = GraphQLObjectType(
+    name='Biz',
+    fields=lambda: {
+        'fizz': GraphQLField(GraphQLString)})
+
+SomeUnionType = GraphQLUnionType(
+    name='SomeUnion',
+    types=[FooType, BizType])
+
+SomeEnumType = GraphQLEnumType(
+    name='SomeEnum',
+    values={
+        'ONE': GraphQLEnumValue(1),
+        'TWO': GraphQLEnumValue(2)})
+
+SomeInputType = GraphQLInputObjectType('SomeInput', lambda: {
+    'fooArg': GraphQLInputField(GraphQLString)})
+
+test_schema = GraphQLSchema(
+    query=GraphQLObjectType(
+        name='Query',
+        fields=lambda: {
+            'foo': GraphQLField(FooType),
+            'someScalar': GraphQLField(SomeScalarType),
+            'someUnion': GraphQLField(SomeUnionType),
+            'someEnum': GraphQLField(SomeEnumType),
+            'someInterface': GraphQLField(
+                SomeInterfaceType,
+                args={'id': GraphQLArgument(GraphQLNonNull(GraphQLID))}),
+            'someInput': GraphQLField(
+                GraphQLString,
+                args={'input': GraphQLArgument(SomeInputType)})}),
+    types=[FooType, BarType],
+    directives=specified_directives + (GraphQLDirective(
+        'foo', args={'input': GraphQLArgument(SomeInputType)}, locations=[
+            DirectiveLocation.SCHEMA,
+            DirectiveLocation.SCALAR,
+            DirectiveLocation.OBJECT,
+            DirectiveLocation.FIELD_DEFINITION,
+            DirectiveLocation.ARGUMENT_DEFINITION,
+            DirectiveLocation.INTERFACE,
+            DirectiveLocation.UNION,
+            DirectiveLocation.ENUM,
+            DirectiveLocation.ENUM_VALUE,
+            DirectiveLocation.INPUT_OBJECT,
+            DirectiveLocation.INPUT_FIELD_DEFINITION]),))
+
+
+def extend_test_schema(sdl, **options) -> GraphQLSchema:
+    original_print = print_schema(test_schema)
+    ast = parse(sdl)
+    extended_schema = extend_schema(test_schema, ast, **options)
+    assert print_schema(test_schema) == original_print
+    return extended_schema
+
+
+test_schema_ast = parse(print_schema(test_schema))
+test_schema_definitions = [
+    print_ast(node) for node in test_schema_ast.definitions]
+
+
+def print_test_schema_changes(extended_schema):
+    ast = parse(print_schema(extended_schema))
+    ast.definitions = [node for node in ast.definitions
+                       if print_ast(node) not in test_schema_definitions]
+    return print_ast(ast)
+
+
+def describe_extend_schema():
+
+    def returns_the_original_schema_when_there_are_no_type_definitions():
+        extended_schema = extend_test_schema('{ field }')
+        assert extended_schema == test_schema
+
+    def extends_without_altering_original_schema():
+        extended_schema = extend_test_schema("""
+            extend type Query {
+              newField: String
+            }
+            """)
+        assert extended_schema != test_schema
+        assert 'newField' in print_schema(extended_schema)
+        assert 'newField' not in print_schema(test_schema)
+
+    def can_be_used_for_limited_execution():
+        extended_schema = extend_test_schema("""
+            extend type Query {
+              newField: String
+            }
+            """)
+
+        result = graphql_sync(extended_schema,
+                              '{ newField }', {'newField': 123})
+        assert result == ({'newField': '123'}, None)
+
+    def 
can_describe_the_extended_fields(): + extended_schema = extend_test_schema(""" + extend type Query { + "New field description." + newField: String + } + """) + + assert extended_schema.get_type( + 'Query').fields['newField'].description == 'New field description.' + + def extends_objects_by_adding_new_fields(): + extended_schema = extend_test_schema(""" + extend type Foo { + newField: String + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + type Foo implements SomeInterface { + name: String + some: SomeInterface + tree: [Foo]! + newField: String + } + """) + + foo_type = extended_schema.get_type('Foo') + foo_field = extended_schema.get_type('Query').fields['foo'] + assert foo_field.type == foo_type + + def extends_enums_by_adding_new_values(): + extended_schema = extend_test_schema(""" + extend enum SomeEnum { + NEW_ENUM + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + enum SomeEnum { + ONE + TWO + NEW_ENUM + } + """) + + some_enum_type = extended_schema.get_type('SomeEnum') + enum_field = extended_schema.get_type('Query').fields['someEnum'] + assert enum_field.type == some_enum_type + + def extends_unions_by_adding_new_types(): + extended_schema = extend_test_schema(""" + extend union SomeUnion = Bar + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + union SomeUnion = Foo | Biz | Bar + """) + + some_union_type = extended_schema.get_type('SomeUnion') + union_field = extended_schema.get_type('Query').fields['someUnion'] + assert union_field.type == some_union_type + + def allows_extension_of_union_by_adding_itself(): + # invalid schema cannot be built with Python + with raises(TypeError) as exc_info: + extend_test_schema(""" + extend union SomeUnion = SomeUnion + """) + msg = str(exc_info.value) + assert msg == 'SomeUnion types must be GraphQLObjectType objects.' 
+ + def extends_inputs_by_adding_new_fields(): + extended_schema = extend_test_schema(""" + extend input SomeInput { + newField: String + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + input SomeInput { + fooArg: String + newField: String + } + """) + + some_input_type = extended_schema.get_type('SomeInput') + input_field = extended_schema.get_type('Query').fields['someInput'] + assert input_field.args['input'].type == some_input_type + + foo_directive = extended_schema.get_directive('foo') + assert foo_directive.args['input'].type == some_input_type + + def extends_scalars_by_adding_new_directives(): + extended_schema = extend_test_schema(""" + extend scalar SomeScalar @foo + """) + + some_scalar = extended_schema.get_type('SomeScalar') + assert len(some_scalar.extension_ast_nodes) == 1 + assert print_ast(some_scalar.extension_ast_nodes[0]) == ( + 'extend scalar SomeScalar @foo') + + def correctly_assigns_ast_nodes_to_new_and_extended_types(): + extended_schema = extend_test_schema(""" + extend type Query { + newField(testArg: TestInput): TestEnum + } + + extend scalar SomeScalar @foo + + extend enum SomeEnum { + NEW_VALUE + } + + extend union SomeUnion = Bar + + extend input SomeInput { + newField: String + } + + extend interface SomeInterface { + newField: String + } + + enum TestEnum { + TEST_VALUE + } + + input TestInput { + testInputField: TestEnum + } + """) + ast = parse(""" + extend type Query { + oneMoreNewField: TestUnion + } + + extend scalar SomeScalar @test + + extend enum SomeEnum { + ONE_MORE_NEW_VALUE + } + + extend union SomeUnion = TestType + + extend input SomeInput { + oneMoreNewField: String + } + + extend interface SomeInterface { + oneMoreNewField: String + } + + union TestUnion = TestType + + interface TestInterface { + interfaceField: String + } + + type TestType implements TestInterface { + interfaceField: String + } + + directive @test(arg: Int) on FIELD | SCALAR + """) + extended_twice_schema = extend_schema(extended_schema, ast) + + query = extended_twice_schema.get_type('Query') + some_scalar = extended_twice_schema.get_type('SomeScalar') + some_enum = extended_twice_schema.get_type('SomeEnum') + some_union = extended_twice_schema.get_type('SomeUnion') + some_input = extended_twice_schema.get_type('SomeInput') + some_interface = extended_twice_schema.get_type('SomeInterface') + + test_input = extended_twice_schema.get_type('TestInput') + test_enum = extended_twice_schema.get_type('TestEnum') + test_union = extended_twice_schema.get_type('TestUnion') + test_interface = extended_twice_schema.get_type('TestInterface') + test_type = extended_twice_schema.get_type('TestType') + test_directive = extended_twice_schema.get_directive('test') + + assert len(query.extension_ast_nodes) == 2 + assert len(some_scalar.extension_ast_nodes) == 2 + assert len(some_enum.extension_ast_nodes) == 2 + assert len(some_union.extension_ast_nodes) == 2 + assert len(some_input.extension_ast_nodes) == 2 + assert len(some_interface.extension_ast_nodes) == 2 + + assert test_type.extension_ast_nodes is None + assert test_enum.extension_ast_nodes is None + assert test_union.extension_ast_nodes is None + assert test_input.extension_ast_nodes is None + assert test_interface.extension_ast_nodes is None + + restored_extension_ast = DocumentNode( + definitions=[ + *query.extension_ast_nodes, + *some_scalar.extension_ast_nodes, + *some_enum.extension_ast_nodes, + *some_union.extension_ast_nodes, + *some_input.extension_ast_nodes, + *some_interface.extension_ast_nodes, 
+ test_input.ast_node, + test_enum.ast_node, + test_union.ast_node, + test_interface.ast_node, + test_type.ast_node, + test_directive.ast_node]) + + assert print_schema( + extend_schema(test_schema, restored_extension_ast) + ) == print_schema(extended_twice_schema) + + new_field = query.fields['newField'] + assert print_ast( + new_field.ast_node) == 'newField(testArg: TestInput): TestEnum' + assert print_ast( + new_field.args['testArg'].ast_node) == 'testArg: TestInput' + assert print_ast( + query.fields['oneMoreNewField'].ast_node + ) == 'oneMoreNewField: TestUnion' + assert print_ast(some_enum.values['NEW_VALUE'].ast_node) == 'NEW_VALUE' + assert print_ast(some_enum.values[ + 'ONE_MORE_NEW_VALUE'].ast_node) == 'ONE_MORE_NEW_VALUE' + assert print_ast(some_input.fields[ + 'newField'].ast_node) == 'newField: String' + assert print_ast(some_input.fields[ + 'oneMoreNewField'].ast_node) == 'oneMoreNewField: String' + assert print_ast(some_interface.fields[ + 'newField'].ast_node) == 'newField: String' + assert print_ast(some_interface.fields[ + 'oneMoreNewField'].ast_node) == 'oneMoreNewField: String' + + assert print_ast( + test_input.fields['testInputField'].ast_node + ) == 'testInputField: TestEnum' + assert print_ast( + test_enum.values['TEST_VALUE'].ast_node) == 'TEST_VALUE' + assert print_ast( + test_interface.fields['interfaceField'].ast_node + ) == 'interfaceField: String' + assert print_ast( + test_type.fields['interfaceField'].ast_node + ) == 'interfaceField: String' + assert print_ast(test_directive.args['arg'].ast_node) == 'arg: Int' + + def builds_types_with_deprecated_fields_and_values(): + extended_schema = extend_test_schema(""" + type TypeWithDeprecatedField { + newDeprecatedField: String @deprecated(reason: "not used anymore") + } + + enum EnumWithDeprecatedValue { + DEPRECATED @deprecated(reason: "do not use") + } + """) # noqa + deprecated_field_def = extended_schema.get_type( + 'TypeWithDeprecatedField').fields['newDeprecatedField'] + assert deprecated_field_def.is_deprecated is True + assert deprecated_field_def.deprecation_reason == 'not used anymore' + + deprecated_enum_def = extended_schema.get_type( + 'EnumWithDeprecatedValue').values['DEPRECATED'] + assert deprecated_enum_def.is_deprecated is True + assert deprecated_enum_def.deprecation_reason == 'do not use' + + def extends_objects_with_deprecated_fields(): + extended_schema = extend_test_schema(""" + extend type Foo { + deprecatedField: String @deprecated(reason: "not used anymore") + } + """) + deprecated_field_def = extended_schema.get_type( + 'Foo').fields['deprecatedField'] + assert deprecated_field_def.is_deprecated is True + assert deprecated_field_def.deprecation_reason == 'not used anymore' + + def extend_enums_with_deprecated_values(): + extended_schema = extend_test_schema(""" + extend enum SomeEnum { + DEPRECATED @deprecated(reason: "do not use") + } + """) + + deprecated_enum_def = extended_schema.get_type( + 'SomeEnum').values['DEPRECATED'] + assert deprecated_enum_def.is_deprecated is True + assert deprecated_enum_def.deprecation_reason == 'do not use' + + def adds_new_unused_object_type(): + extended_schema = extend_test_schema(""" + type Unused { + someField: String + } + """) + assert extended_schema != test_schema + assert print_test_schema_changes(extended_schema) == dedent(""" + type Unused { + someField: String + } + """) + + def adds_new_unused_enum_type(): + extended_schema = extend_test_schema(""" + enum UnusedEnum { + SOME + } + """) + assert extended_schema != test_schema + assert 
print_test_schema_changes(extended_schema) == dedent(""" + enum UnusedEnum { + SOME + } + """) + + def adds_new_unused_input_object_type(): + extended_schema = extend_test_schema(""" + input UnusedInput { + someInput: String + } + """) + assert extended_schema != test_schema + assert print_test_schema_changes(extended_schema) == dedent(""" + input UnusedInput { + someInput: String + } + """) + + def adds_new_union_using_new_object_type(): + extended_schema = extend_test_schema(""" + type DummyUnionMember { + someField: String + } + + union UnusedUnion = DummyUnionMember + """) + assert extended_schema != test_schema + assert print_test_schema_changes(extended_schema) == dedent(""" + type DummyUnionMember { + someField: String + } + + union UnusedUnion = DummyUnionMember + """) + + def extends_objects_by_adding_new_fields_with_arguments(): + extended_schema = extend_test_schema(""" + extend type Foo { + newField(arg1: String, arg2: NewInputObj!): String + } + + input NewInputObj { + field1: Int + field2: [Float] + field3: String! + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + type Foo implements SomeInterface { + name: String + some: SomeInterface + tree: [Foo]! + newField(arg1: String, arg2: NewInputObj!): String + } + + input NewInputObj { + field1: Int + field2: [Float] + field3: String! + } + """) + + def extends_objects_by_adding_new_fields_with_existing_types(): + extended_schema = extend_test_schema(""" + extend type Foo { + newField(arg1: SomeEnum!): SomeEnum + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + type Foo implements SomeInterface { + name: String + some: SomeInterface + tree: [Foo]! + newField(arg1: SomeEnum!): SomeEnum + } + """) + + def extends_objects_by_adding_implemented_interfaces(): + extended_schema = extend_test_schema(""" + extend type Biz implements SomeInterface { + name: String + some: SomeInterface + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + type Biz implements SomeInterface { + fizz: String + name: String + some: SomeInterface + } + """) + + def extends_objects_by_including_new_types(): + extended_schema = extend_test_schema(""" + extend type Foo { + newObject: NewObject + newInterface: NewInterface + newUnion: NewUnion + newScalar: NewScalar + newEnum: NewEnum + newTree: [Foo]! + } + + type NewObject implements NewInterface { + baz: String + } + + type NewOtherObject { + fizz: Int + } + + interface NewInterface { + baz: String + } + + union NewUnion = NewObject | NewOtherObject + + scalar NewScalar + + enum NewEnum { + OPTION_A + OPTION_B + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + type Foo implements SomeInterface { + name: String + some: SomeInterface + tree: [Foo]! + newObject: NewObject + newInterface: NewInterface + newUnion: NewUnion + newScalar: NewScalar + newEnum: NewEnum + newTree: [Foo]! 
+ } + + enum NewEnum { + OPTION_A + OPTION_B + } + + interface NewInterface { + baz: String + } + + type NewObject implements NewInterface { + baz: String + } + + type NewOtherObject { + fizz: Int + } + + scalar NewScalar + + union NewUnion = NewObject | NewOtherObject + """) + + def extends_objects_by_adding_implemented_new_interfaces(): + extended_schema = extend_test_schema(""" + extend type Foo implements NewInterface { + baz: String + } + + interface NewInterface { + baz: String + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + type Foo implements SomeInterface & NewInterface { + name: String + some: SomeInterface + tree: [Foo]! + baz: String + } + + interface NewInterface { + baz: String + } + """) + + def extends_different_types_multiple_times(): + extended_schema = extend_test_schema(""" + extend type Biz implements NewInterface { + buzz: String + } + + extend type Biz implements SomeInterface { + name: String + some: SomeInterface + newFieldA: Int + } + + extend type Biz { + newFieldA: Int + newFieldB: Float + } + + interface NewInterface { + buzz: String + } + + extend enum SomeEnum { + THREE + } + + extend enum SomeEnum { + FOUR + } + + extend union SomeUnion = Boo + + extend union SomeUnion = Joo + + type Boo { + fieldA: String + } + + type Joo { + fieldB: String + } + + extend input SomeInput { + fieldA: String + } + + extend input SomeInput { + fieldB: String + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + type Biz implements NewInterface & SomeInterface { + fizz: String + buzz: String + name: String + some: SomeInterface + newFieldA: Int + newFieldB: Float + } + + type Boo { + fieldA: String + } + + type Joo { + fieldB: String + } + + interface NewInterface { + buzz: String + } + + enum SomeEnum { + ONE + TWO + THREE + FOUR + } + + input SomeInput { + fooArg: String + fieldA: String + fieldB: String + } + + union SomeUnion = Foo | Biz | Boo | Joo + """) + + def extends_interfaces_by_adding_new_fields(): + extended_schema = extend_test_schema(""" + extend interface SomeInterface { + newField: String + } + + extend type Bar { + newField: String + } + + extend type Foo { + newField: String + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + type Bar implements SomeInterface { + name: String + some: SomeInterface + foo: Foo + newField: String + } + + type Foo implements SomeInterface { + name: String + some: SomeInterface + tree: [Foo]! 
+ newField: String + } + + interface SomeInterface { + name: String + some: SomeInterface + newField: String + } + """) + + def allows_extension_of_interface_with_missing_object_fields(): + extended_schema = extend_test_schema(""" + extend interface SomeInterface { + newField: String + } + """) + errors = validate_schema(extended_schema) + assert errors + assert print_test_schema_changes(extended_schema) == dedent(""" + interface SomeInterface { + name: String + some: SomeInterface + newField: String + } + """) + + def extends_interfaces_multiple_times(): + extended_schema = extend_test_schema(""" + extend interface SomeInterface { + newFieldA: Int + } + + extend interface SomeInterface { + newFieldB(test: Boolean): String + } + """) + assert print_test_schema_changes(extended_schema) == dedent(""" + interface SomeInterface { + name: String + some: SomeInterface + newFieldA: Int + newFieldB(test: Boolean): String + } + """) + + def may_extend_mutations_and_subscriptions(): + mutationSchema = GraphQLSchema( + query=GraphQLObjectType( + name='Query', fields=lambda: { + 'queryField': GraphQLField(GraphQLString)}), + mutation=GraphQLObjectType( + name='Mutation', fields=lambda: { + 'mutationField': GraphQLField(GraphQLString)}), + subscription=GraphQLObjectType( + name='Subscription', fields=lambda: { + 'subscriptionField': GraphQLField(GraphQLString)})) + + ast = parse(""" + extend type Query { + newQueryField: Int + } + + extend type Mutation { + newMutationField: Int + } + + extend type Subscription { + newSubscriptionField: Int + } + """) + original_print = print_schema(mutationSchema) + extended_schema = extend_schema(mutationSchema, ast) + assert extended_schema != mutationSchema + assert print_schema(mutationSchema) == original_print + assert print_schema(extended_schema) == dedent(""" + type Mutation { + mutationField: String + newMutationField: Int + } + + type Query { + queryField: String + newQueryField: Int + } + + type Subscription { + subscriptionField: String + newSubscriptionField: Int + } + """) + + def may_extend_directives_with_new_simple_directive(): + extended_schema = extend_test_schema(""" + directive @neat on QUERY + """) + + new_directive = extended_schema.get_directive('neat') + assert new_directive.name == 'neat' + assert DirectiveLocation.QUERY in new_directive.locations + + def sets_correct_description_when_extending_with_a_new_directive(): + extended_schema = extend_test_schema(''' + """ + new directive + """ + directive @new on QUERY + ''') + + new_directive = extended_schema.get_directive('new') + assert new_directive.description == 'new directive' + + def may_extend_directives_with_new_complex_directive(): + extended_schema = extend_test_schema(""" + directive @profile(enable: Boolean! tag: String) on QUERY | FIELD + """) + + extended_directive = extended_schema.get_directive('profile') + assert extended_directive.name == 'profile' + assert DirectiveLocation.QUERY in extended_directive.locations + assert DirectiveLocation.FIELD in extended_directive.locations + + args = extended_directive.args + assert list(args.keys()) == ['enable', 'tag'] + arg0, arg1 = args.values() + assert is_non_null_type(arg0.type) is True + assert is_scalar_type(arg0.type.of_type) is True + assert is_scalar_type(arg1.type) is True + + def does_not_allow_replacing_a_default_directive(): + sdl = """ + directive @include(if: Boolean!) 
on FIELD | FRAGMENT_SPREAD + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(sdl) + assert str(exc_info.value).startswith( + "Directive 'include' already exists in the schema." + ' It cannot be redefined.') + + def does_not_allow_replacing_a_custom_directive(): + extended_schema = extend_test_schema(""" + directive @meow(if: Boolean!) on FIELD | FRAGMENT_SPREAD + """) + + replacement_ast = parse(""" + directive @meow(if: Boolean!) on FIELD | QUERY + """) + + with raises(GraphQLError) as exc_info: + extend_schema(extended_schema, replacement_ast) + assert str(exc_info.value).startswith( + "Directive 'meow' already exists in the schema." + ' It cannot be redefined.') + + def does_not_allow_replacing_an_existing_type(): + def existing_type_error(type_): + return (f"Type '{type_}' already exists in the schema." + ' It cannot also be defined in this type definition.') + + type_sdl = """ + type Bar + """ + with raises(GraphQLError) as exc_info: + assert extend_test_schema(type_sdl) + assert str(exc_info.value).startswith(existing_type_error('Bar')) + + scalar_sdl = """ + scalar SomeScalar + """ + with raises(GraphQLError) as exc_info: + assert extend_test_schema(scalar_sdl) + assert str(exc_info.value).startswith( + existing_type_error('SomeScalar')) + + enum_sdl = """ + enum SomeEnum + """ + with raises(GraphQLError) as exc_info: + assert extend_test_schema(enum_sdl) + assert str(exc_info.value).startswith(existing_type_error('SomeEnum')) + + union_sdl = """ + union SomeUnion + """ + with raises(GraphQLError) as exc_info: + assert extend_test_schema(union_sdl) + assert str(exc_info.value).startswith(existing_type_error('SomeUnion')) + + input_sdl = """ + input SomeInput + """ + with raises(GraphQLError) as exc_info: + assert extend_test_schema(input_sdl) + assert str(exc_info.value).startswith(existing_type_error('SomeInput')) + + def does_not_allow_replacing_an_existing_field(): + def existing_field_error(type_, field): + return (f"Field '{type_}.{field}' already exists in the schema." + ' It cannot also be defined in this type extension.') + + type_sdl = """ + extend type Bar { + foo: Foo + } + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(type_sdl) + assert str(exc_info.value).startswith( + existing_field_error('Bar', 'foo')) + + interface_sdl = """ + extend interface SomeInterface { + some: Foo + } + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(interface_sdl) + assert str(exc_info.value).startswith( + existing_field_error('SomeInterface', 'some')) + + input_sdl = """ + extend input SomeInput { + fooArg: String + } + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(input_sdl) + assert str(exc_info.value).startswith( + existing_field_error('SomeInput', 'fooArg')) + + def does_not_allow_replacing_an_existing_enum_value(): + sdl = """ + extend enum SomeEnum { + ONE + } + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(sdl) + assert str(exc_info.value).startswith( + "Enum value 'SomeEnum.ONE' already exists in the schema." + ' It cannot also be defined in this type extension.') + + def does_not_allow_referencing_an_unknown_type(): + unknown_type_error = ( + "Unknown type: 'Quix'. 
Ensure that this type exists either" + ' in the original schema, or is added in a type definition.') + + type_sdl = """ + extend type Bar { + quix: Quix + } + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(type_sdl) + assert str(exc_info.value).startswith(unknown_type_error) + + interface_sdl = """ + extend interface SomeInterface { + quix: Quix + } + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(interface_sdl) + assert str(exc_info.value).startswith(unknown_type_error) + + input_sdl = """ + extend input SomeInput { + quix: Quix + } + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(input_sdl) + assert str(exc_info.value).startswith(unknown_type_error) + + def does_not_allow_extending_an_unknown_type(): + for sdl in [ + 'extend scalar UnknownType @foo', + 'extend type UnknownType @foo', + 'extend interface UnknownType @foo', + 'extend enum UnknownType @foo', + 'extend union UnknownType @foo', + 'extend input UnknownType @foo']: + with raises(GraphQLError) as exc_info: + extend_test_schema(sdl) + assert str(exc_info.value).startswith( + "Cannot extend type 'UnknownType'" + ' because it does not exist in the existing schema.') + + def it_does_not_allow_extending_a_mismatch_type(): + type_sdl = """ + extend type SomeInterface @foo + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(type_sdl) + assert str(exc_info.value).startswith( + "Cannot extend non-object type 'SomeInterface'.") + + interface_sdl = """ + extend interface Foo @foo + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(interface_sdl) + assert str(exc_info.value).startswith( + "Cannot extend non-interface type 'Foo'.") + + enum_sdl = """ + extend enum Foo @foo + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(enum_sdl) + assert str(exc_info.value).startswith( + "Cannot extend non-enum type 'Foo'.") + + union_sdl = """ + extend union Foo @foo + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(union_sdl) + assert str(exc_info.value).startswith( + "Cannot extend non-union type 'Foo'.") + + input_sdl = """ + extend input Foo @foo + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(input_sdl) + assert str(exc_info.value).startswith( + "Cannot extend non-input object type 'Foo'.") + + def describe_can_add_additional_root_operation_types(): + + def does_not_automatically_include_common_root_type_names(): + schema = extend_test_schema(""" + type Mutation { + doSomething: String + } + """) + assert schema.mutation_type is None + + def does_not_allow_new_schema_within_an_extension(): + sdl = """ + schema { + mutation: Mutation + } + + type Mutation { + doSomething: String + } + """ + with raises(GraphQLError) as exc_info: + extend_test_schema(sdl) + assert str(exc_info.value).startswith( + 'Cannot define a new schema within a schema extension.') + + def adds_new_root_types_via_schema_extension(): + schema = extend_test_schema(""" + extend schema { + mutation: Mutation + } + + type Mutation { + doSomething: String + } + """) + mutation_type = schema.mutation_type + assert mutation_type.name == 'Mutation' + + def adds_multiple_new_root_types_via_schema_extension(): + schema = extend_test_schema(""" + extend schema { + mutation: Mutation + subscription: Subscription + } + + type Mutation { + doSomething: String + } + + type Subscription { + hearSomething: String + } + """) + mutation_type = schema.mutation_type + subscription_type = schema.subscription_type + assert mutation_type.name == 'Mutation' + 
assert subscription_type.name == 'Subscription' + + def applies_multiple_schema_extensions(): + schema = extend_test_schema(""" + extend schema { + mutation: Mutation + } + + extend schema { + subscription: Subscription + } + + type Mutation { + doSomething: String + } + + type Subscription { + hearSomething: String + } + """) + mutation_type = schema.mutation_type + subscription_type = schema.subscription_type + assert mutation_type.name == 'Mutation' + assert subscription_type.name == 'Subscription' + + def schema_extension_ast_are_available_from_schema_object(): + schema = extend_test_schema(""" + extend schema { + mutation: Mutation + } + + extend schema { + subscription: Subscription + } + + type Mutation { + doSomething: String + } + + type Subscription { + hearSomething: String + } + """) + + ast = parse(""" + extend schema @foo + """) + schema = extend_schema(schema, ast) + + nodes = schema.extension_ast_nodes + assert ''.join( + print_ast(node) + '\n' for node in nodes) == dedent(""" + extend schema { + mutation: Mutation + } + extend schema { + subscription: Subscription + } + extend schema @foo + """) + + def does_not_allow_redefining_an_existing_root_type(): + sdl = """ + extend schema { + query: SomeType + } + + type SomeType { + seeSomething: String + } + """ + with raises(TypeError) as exc_info: + extend_test_schema(sdl) + assert str(exc_info.value).startswith( + 'Must provide only one query type in schema.') + + def does_not_allow_defining_a_root_operation_type_twice(): + sdl = """ + extend schema { + mutation: Mutation + } + + extend schema { + mutation: Mutation + } + + type Mutation { + doSomething: String + } + """ + with raises(TypeError) as exc_info: + extend_test_schema(sdl) + assert str(exc_info.value).startswith( + 'Must provide only one mutation type in schema.') + + def does_not_allow_defining_root_operation_type_with_different_types(): + sdl = """ + extend schema { + mutation: Mutation + } + + extend schema { + mutation: SomethingElse + } + + type Mutation { + doSomething: String + } + + type SomethingElse { + doSomethingElse: String + } + """ + with raises(TypeError) as exc_info: + extend_test_schema(sdl) + assert str(exc_info.value).startswith( + 'Must provide only one mutation type in schema.') diff --git a/tests/utilities/test_find_breaking_changes.py b/tests/utilities/test_find_breaking_changes.py new file mode 100644 index 00000000..c8a42fd4 --- /dev/null +++ b/tests/utilities/test_find_breaking_changes.py @@ -0,0 +1,1033 @@ +from graphql.language import DirectiveLocation +from graphql.type import ( + GraphQLSchema, GraphQLDirective, GraphQLDeprecatedDirective, + GraphQLIncludeDirective, GraphQLSkipDirective) +from graphql.utilities import ( + BreakingChangeType, DangerousChangeType, + build_schema, find_breaking_changes, find_dangerous_changes) +from graphql.utilities.find_breaking_changes import ( + find_removed_types, find_types_that_changed_kind, + find_fields_that_changed_type_on_object_or_interface_types, + find_fields_that_changed_type_on_input_object_types, + find_types_removed_from_unions, find_values_removed_from_enums, + find_arg_changes, find_interfaces_removed_from_object_types, + find_removed_directives, find_removed_directive_args, + find_added_non_null_directive_args, find_removed_locations_for_directive, + find_removed_directive_locations, find_values_added_to_enums, + find_interfaces_added_to_object_types, find_types_added_to_unions) + + +def describe_find_breaking_changes(): + + def should_detect_if_a_type_was_removed_or_not(): + 
old_schema = build_schema(""" + type Type1 { + field1: String + } + + type Type2 { + field1: String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type2 { + field1: String + } + + type Query { + field1: String + } + """) + + assert find_removed_types(old_schema, new_schema) == [ + (BreakingChangeType.TYPE_REMOVED, 'Type1 was removed.')] + assert find_removed_types(old_schema, old_schema) == [] + + def should_detect_if_a_type_changed_its_type(): + old_schema = build_schema(""" + interface Type1 { + field1: String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type ObjectType { + field1: String + } + + union Type1 = ObjectType + + type Query { + field1: String + } + """) + + assert find_types_that_changed_kind(old_schema, new_schema) == [ + (BreakingChangeType.TYPE_CHANGED_KIND, + 'Type1 changed from an Interface type to a Union type.')] + + def should_detect_if_a_field_on_type_was_deleted_or_changed_type(): + old_schema = build_schema(""" + type TypeA { + field1: String + } + + interface Type1 { + field1: TypeA + field2: String + field3: String + field4: TypeA + field6: String + field7: [String] + field8: Int + field9: Int! + field10: [Int]! + field11: Int + field12: [Int] + field13: [Int!] + field14: [Int] + field15: [[Int]] + field16: Int! + field17: [Int] + field18: [[Int!]!] + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type TypeA { + field1: String + } + + type TypeB { + field1: String + } + + interface Type1 { + field1: TypeA + field3: Boolean + field4: TypeB + field5: String + field6: [String] + field7: String + field8: Int! + field9: Int + field10: [Int] + field11: [Int]! + field12: [Int!] + field13: [Int] + field14: [[Int]] + field15: [Int] + field16: [Int]! + field17: [Int]! + field18: [[Int!]] + } + + type Query { + field1: String + } + """) + + expected_field_changes = [ + (BreakingChangeType.FIELD_REMOVED, 'Type1.field2 was removed.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field3 changed type from String to Boolean.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field4 changed type from TypeA to TypeB.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field6 changed type from String to [String].'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field7 changed type from [String] to String.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field9 changed type from Int! to Int.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field10 changed type from [Int]! to [Int].'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field11 changed type from Int to [Int]!.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field13 changed type from [Int!] to [Int].'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field14 changed type from [Int] to [[Int]].'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field15 changed type from [[Int]] to [Int].'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field16 changed type from Int! to [Int]!.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'Type1.field18 changed type from [[Int!]!] to [[Int!]].')] + + assert find_fields_that_changed_type_on_object_or_interface_types( + old_schema, new_schema) == expected_field_changes + + def should_detect_if_fields_on_input_types_changed_kind_or_were_removed(): + old_schema = build_schema(""" + input InputType1 { + field1: String + field2: Boolean + field3: [String] + field4: String! + field5: String + field6: [Int] + field7: [Int]! 
+ field8: Int + field9: [Int] + field10: [Int!] + field11: [Int] + field12: [[Int]] + field13: Int! + field14: [[Int]!] + field15: [[Int]!] + } + + type Query { + field1: String + }""") + + new_schema = build_schema(""" + input InputType1 { + field1: Int + field3: String + field4: String + field5: String! + field6: [Int]! + field7: [Int] + field8: [Int]! + field9: [Int!] + field10: [Int] + field11: [[Int]] + field12: [Int] + field13: [Int]! + field14: [[Int]] + field15: [[Int!]!] + } + + type Query { + field1: String + } + """) + + expected_field_changes = [ + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field1 changed type from String to Int.'), + (BreakingChangeType.FIELD_REMOVED, + 'InputType1.field2 was removed.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field3 changed type from [String] to String.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field5 changed type from String to String!.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field6 changed type from [Int] to [Int]!.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field8 changed type from Int to [Int]!.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field9 changed type from [Int] to [Int!].'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field11 changed type from [Int] to [[Int]].'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field12 changed type from [[Int]] to [Int].'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field13 changed type from Int! to [Int]!.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'InputType1.field15 changed type from [[Int]!] to [[Int!]!].')] + + assert find_fields_that_changed_type_on_input_object_types( + old_schema, new_schema).breaking_changes == expected_field_changes + + def should_detect_if_a_non_null_field_is_added_to_an_input_type(): + old_schema = build_schema(""" + input InputType1 { + field1: String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + input InputType1 { + field1: String + requiredField: Int! 
+ optionalField: Boolean + } + + type Query { + field1: String + } + """) + + expected_field_changes = [ + (BreakingChangeType.NON_NULL_INPUT_FIELD_ADDED, + 'A non-null field requiredField on input type' + ' InputType1 was added.')] + + assert find_fields_that_changed_type_on_input_object_types( + old_schema, new_schema).breaking_changes == expected_field_changes + + def should_detect_if_a_type_was_removed_from_a_union_type(): + old_schema = build_schema(""" + type Type1 { + field1: String + } + + type Type2 { + field1: String + } + + union UnionType1 = Type1 | Type2 + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type1 { + field1: String + } + + type Type3 { + field1: String + } + + union UnionType1 = Type1 | Type3 + + type Query { + field1: String + } + """) + + assert find_types_removed_from_unions(old_schema, new_schema) == [ + (BreakingChangeType.TYPE_REMOVED_FROM_UNION, + 'Type2 was removed from union type UnionType1.')] + + def should_detect_if_a_value_was_removed_from_an_enum_type(): + old_schema = build_schema(""" + enum EnumType1 { + VALUE0 + VALUE1 + VALUE2 + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + enum EnumType1 { + VALUE0 + VALUE2 + VALUE3 + } + + type Query { + field1: String + } + """) + + assert find_values_removed_from_enums(old_schema, new_schema) == [ + (BreakingChangeType.VALUE_REMOVED_FROM_ENUM, + 'VALUE1 was removed from enum type EnumType1.')] + + def should_detect_if_a_field_argument_was_removed(): + old_schema = build_schema(""" + input InputType1 { + field1: String + } + + interface Interface1 { + field1(arg1: Boolean, objectArg: InputType1): String + } + + type Type1 { + field1(name: String): String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + interface Interface1 { + field1: String + } + + type Type1 { + field1: String + } + + type Query { + field1: String + } + """) + + assert find_arg_changes(old_schema, new_schema).breaking_changes == [ + (BreakingChangeType.ARG_REMOVED, + 'Interface1.field1 arg arg1 was removed'), + (BreakingChangeType.ARG_REMOVED, + 'Interface1.field1 arg objectArg was removed'), + (BreakingChangeType.ARG_REMOVED, + 'Type1.field1 arg name was removed')] + + def should_detect_if_a_field_argument_has_changed_type(): + old_schema = build_schema(""" + type Type1 { + field1( + arg1: String + arg2: String + arg3: [String] + arg4: String + arg5: String! + arg6: String! + arg7: [Int]! + arg8: Int + arg9: [Int] + arg10: [Int!] + arg11: [Int] + arg12: [[Int]] + arg13: Int! + arg14: [[Int]!] + arg15: [[Int]!] + ): String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type1 { + field1( + arg1: Int + arg2: [String] + arg3: String + arg4: String! + arg5: Int + arg6: Int! + arg7: [Int] + arg8: [Int]! + arg9: [Int!] + arg10: [Int] + arg11: [[Int]] + arg12: [Int] + arg13: [Int]! + arg14: [[Int]] + arg15: [[Int!]!] 
+ ): String + } + + type Query { + field1: String + } + """) + + assert find_arg_changes(old_schema, new_schema).breaking_changes == [ + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg1 has changed type from String to Int'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg2 has changed type from String to [String]'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg3 has changed type from [String] to String'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg4 has changed type from String to String!'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg5 has changed type from String! to Int'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg6 has changed type from String! to Int!'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg8 has changed type from Int to [Int]!'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg9 has changed type from [Int] to [Int!]'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg11 has changed type from [Int] to [[Int]]'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg12 has changed type from [[Int]] to [Int]'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg13 has changed type from Int! to [Int]!'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'Type1.field1 arg arg15 has changed type from [[Int]!]' + ' to [[Int!]!]')] + + def should_detect_if_a_non_null_field_argument_was_added(): + old_schema = build_schema(""" + type Type1 { + field1(arg1: String): String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type1 { + field1(arg1: String, newRequiredArg: String!, newOptionalArg: Int): String + } + + type Query { + field1: String + } + """) # noqa + + assert find_arg_changes(old_schema, new_schema).breaking_changes == [ + (BreakingChangeType.NON_NULL_ARG_ADDED, + 'A non-null arg newRequiredArg on Type1.field1 was added')] + + def should_not_flag_args_with_the_same_type_signature_as_breaking(): + old_schema = build_schema(""" + input InputType1 { + field1: String + } + + type Type1 { + field1(arg1: Int!, arg2: InputType1): Int + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + input InputType1 { + field1: String + } + + type Type1 { + field1(arg1: Int!, arg2: InputType1): Int + } + + type Query { + field1: String + } + """) + + assert find_arg_changes(old_schema, new_schema).breaking_changes == [] + + def should_consider_args_that_move_away_from_non_null_as_non_breaking(): + old_schema = build_schema(""" + type Type1 { + field1(name: String!): String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type1 { + field1(name: String): String + } + + type Query { + field1: String + } + """) + + assert find_arg_changes(old_schema, new_schema).breaking_changes == [] + + def should_detect_interfaces_removed_from_types(): + old_schema = build_schema(""" + interface Interface1 { + field1: String + } + + type Type1 implements Interface1 { + field1: String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type1 { + field1: String + } + + type Query { + field1: String + } + """) + + assert find_interfaces_removed_from_object_types( + old_schema, new_schema) == [ + (BreakingChangeType.INTERFACE_REMOVED_FROM_OBJECT, + 'Type1 no longer implements interface Interface1.')] + + def should_detect_all_breaking_changes(): + old_schema = build_schema(""" + directive 
@DirectiveThatIsRemoved on FIELD_DEFINITION + + directive @DirectiveThatRemovesArg(arg1: String) on FIELD_DEFINITION + + directive @NonNullDirectiveAdded on FIELD_DEFINITION + + directive @DirectiveName on FIELD_DEFINITION | QUERY + + type ArgThatChanges { + field1(id: Int): String + } + + enum EnumTypeThatLosesAValue { + VALUE0 + VALUE1 + VALUE2 + } + + interface Interface1 { + field1: String + } + + type TypeThatGainsInterface1 implements Interface1 { + field1: String + } + + type TypeInUnion1 { + field1: String + } + + type TypeInUnion2 { + field1: String + } + + union UnionTypeThatLosesAType = TypeInUnion1 | TypeInUnion2 + + type TypeThatChangesType { + field1: String + } + + type TypeThatGetsRemoved { + field1: String + } + + interface TypeThatHasBreakingFieldChanges { + field1: String + field2: String + } + + type Query { + field1: String + } + """) # noqa + + new_schema = build_schema(""" + directive @DirectiveThatRemovesArg on FIELD_DEFINITION + + directive @NonNullDirectiveAdded(arg1: Boolean!) on FIELD_DEFINITION + + directive @DirectiveName on FIELD_DEFINITION + + type ArgThatChanges { + field1(id: String): String + } + + enum EnumTypeThatLosesAValue { + VALUE1 + VALUE2 + } + + interface Interface1 { + field1: String + } + + type TypeInUnion1 { + field1: String + } + + union UnionTypeThatLosesAType = TypeInUnion1 + + interface TypeThatChangesType { + field1: String + } + + type TypeThatGainsInterface1 { + field1: String + } + + interface TypeThatHasBreakingFieldChanges { + field2: Boolean + } + + type Query { + field1: String + } + """) # noqa + + expected_breaking_changes = [ + (BreakingChangeType.TYPE_REMOVED, + 'Int was removed.'), + (BreakingChangeType.TYPE_REMOVED, + 'TypeInUnion2 was removed.'), + (BreakingChangeType.TYPE_REMOVED, + 'TypeThatGetsRemoved was removed.'), + (BreakingChangeType.TYPE_CHANGED_KIND, + 'TypeThatChangesType changed from an Object type to an' + ' Interface type.'), + (BreakingChangeType.FIELD_REMOVED, + 'TypeThatHasBreakingFieldChanges.field1 was removed.'), + (BreakingChangeType.FIELD_CHANGED_KIND, + 'TypeThatHasBreakingFieldChanges.field2 changed type' + ' from String to Boolean.'), + (BreakingChangeType.TYPE_REMOVED_FROM_UNION, + 'TypeInUnion2 was removed from union type' + ' UnionTypeThatLosesAType.'), + (BreakingChangeType.VALUE_REMOVED_FROM_ENUM, + 'VALUE0 was removed from enum type EnumTypeThatLosesAValue.'), + (BreakingChangeType.ARG_CHANGED_KIND, + 'ArgThatChanges.field1 arg id has changed' + ' type from Int to String'), + (BreakingChangeType.INTERFACE_REMOVED_FROM_OBJECT, + 'TypeThatGainsInterface1 no longer implements' + ' interface Interface1.'), + (BreakingChangeType.DIRECTIVE_REMOVED, + 'DirectiveThatIsRemoved was removed'), + (BreakingChangeType.DIRECTIVE_ARG_REMOVED, + 'arg1 was removed from DirectiveThatRemovesArg'), + (BreakingChangeType.NON_NULL_DIRECTIVE_ARG_ADDED, + 'A non-null arg arg1 on directive' + ' NonNullDirectiveAdded was added'), + (BreakingChangeType.DIRECTIVE_LOCATION_REMOVED, + 'QUERY was removed from DirectiveName')] + + assert find_breaking_changes( + old_schema, new_schema) == expected_breaking_changes + + def should_detect_if_a_directive_was_explicitly_removed(): + old_schema = build_schema(""" + directive @DirectiveThatIsRemoved on FIELD_DEFINITION + directive @DirectiveThatStays on FIELD_DEFINITION + """) + + new_schema = build_schema(""" + directive @DirectiveThatStays on FIELD_DEFINITION + """) + + assert find_removed_directives(old_schema, new_schema) == [ + (BreakingChangeType.DIRECTIVE_REMOVED, + 
'DirectiveThatIsRemoved was removed')] + + def should_detect_if_a_directive_was_implicitly_removed(): + old_schema = GraphQLSchema() + + new_schema = GraphQLSchema( + directives=[GraphQLSkipDirective, GraphQLIncludeDirective]) + + assert find_removed_directives(old_schema, new_schema) == [ + (BreakingChangeType.DIRECTIVE_REMOVED, + f'{GraphQLDeprecatedDirective.name} was removed')] + + def should_detect_if_a_directive_argument_was_removed(): + old_schema = build_schema(""" + directive @DirectiveWithArg(arg1: Int) on FIELD_DEFINITION + """) + + new_schema = build_schema(""" + directive @DirectiveWithArg on FIELD_DEFINITION + """) + + assert find_removed_directive_args(old_schema, new_schema) == [ + (BreakingChangeType.DIRECTIVE_ARG_REMOVED, + 'arg1 was removed from DirectiveWithArg')] + + def should_detect_if_a_non_nullable_directive_argument_was_added(): + old_schema = build_schema(""" + directive @DirectiveName on FIELD_DEFINITION + """) + + new_schema = build_schema(""" + directive @DirectiveName(arg1: Boolean!) on FIELD_DEFINITION + """) + + assert find_added_non_null_directive_args(old_schema, new_schema) == [ + (BreakingChangeType.NON_NULL_DIRECTIVE_ARG_ADDED, + 'A non-null arg arg1 on directive DirectiveName was added')] + + def should_detect_locations_removed_from_a_directive(): + d1 = GraphQLDirective('Directive Name', locations=[ + DirectiveLocation.FIELD_DEFINITION, DirectiveLocation.QUERY]) + + d2 = GraphQLDirective('Directive Name', locations=[ + DirectiveLocation.FIELD_DEFINITION]) + + assert find_removed_locations_for_directive(d1, d2) == [ + DirectiveLocation.QUERY] + + def should_detect_locations_removed_directives_within_a_schema(): + old_schema = build_schema(""" + directive @DirectiveName on FIELD_DEFINITION | QUERY + """) + + new_schema = build_schema(""" + directive @DirectiveName on FIELD_DEFINITION + """) + + assert find_removed_directive_locations(old_schema, new_schema) == [ + (BreakingChangeType.DIRECTIVE_LOCATION_REMOVED, + 'QUERY was removed from DirectiveName')] + + +def describe_find_dangerous_changes(): + + def describe_find_arg_changes(): + + def should_detect_if_an_arguments_default_value_has_changed(): + old_schema = build_schema(""" + type Type1 { + field1(name: String = "test"): String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type1 { + field1(name: String = "Test"): String + } + + type Query { + field1: String + } + """) + + assert find_arg_changes( + old_schema, new_schema).dangerous_changes == [ + (DangerousChangeType.ARG_DEFAULT_VALUE_CHANGE, + 'Type1.field1 arg name has changed defaultValue')] + + def should_detect_if_a_value_was_added_to_an_enum_type(): + old_schema = build_schema(""" + enum EnumType1 { + VALUE0 + VALUE1 + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + enum EnumType1 { + VALUE0 + VALUE1 + VALUE2 + } + + type Query { + field1: String + } + """) + + assert find_values_added_to_enums(old_schema, new_schema) == [ + (DangerousChangeType.VALUE_ADDED_TO_ENUM, + 'VALUE2 was added to enum type EnumType1.')] + + def should_detect_interfaces_added_to_types(): + old_schema = build_schema(""" + type Type1 { + field1: String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + interface Interface1 { + field1: String + } + + type Type1 implements Interface1 { + field1: String + } + + type Query { + field1: String + } + """) + + assert find_interfaces_added_to_object_types( + old_schema, new_schema) == [ + 
(DangerousChangeType.INTERFACE_ADDED_TO_OBJECT, + 'Interface1 added to interfaces implemented by Type1.')] + + def should_detect_if_a_type_was_added_to_a_union_type(): + old_schema = build_schema(""" + type Type1 { + field1: String + } + + union UnionType1 = Type1 + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type1 { + field1: String + } + + type Type2 { + field1: String + } + + union UnionType1 = Type1 | Type2 + + type Query { + field1: String + } + """) + + assert find_types_added_to_unions(old_schema, new_schema) == [ + (DangerousChangeType.TYPE_ADDED_TO_UNION, + 'Type2 was added to union type UnionType1.')] + + def should_detect_if_a_nullable_field_was_added_to_an_input(): + old_schema = build_schema(""" + input InputType1 { + field1: String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + input InputType1 { + field1: String + field2: Int + } + + type Query { + field1: String + } + """) + + expected_field_changes = [ + (DangerousChangeType.NULLABLE_INPUT_FIELD_ADDED, + 'A nullable field field2 on input type InputType1 was added.')] + + assert find_fields_that_changed_type_on_input_object_types( + old_schema, new_schema).dangerous_changes == expected_field_changes + + def should_find_all_dangerous_changes(): + old_schema = build_schema(""" + enum EnumType1 { + VALUE0 + VALUE1 + } + + type Type1 { + field1(name: String = "test"): String + } + + type TypeThatGainsInterface1 { + field1: String + } + + type TypeInUnion1 { + field1: String + } + + union UnionTypeThatGainsAType = TypeInUnion1 + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + enum EnumType1 { + VALUE0 + VALUE1 + VALUE2 + } + + interface Interface1 { + field1: String + } + + type TypeThatGainsInterface1 implements Interface1 { + field1: String + } + + type Type1 { + field1(name: String = "Test"): String + } + + type TypeInUnion1 { + field1: String + } + + type TypeInUnion2 { + field1: String + } + + union UnionTypeThatGainsAType = TypeInUnion1 | TypeInUnion2 + + type Query { + field1: String + } + """) + + expected_dangerous_changes = [ + (DangerousChangeType.ARG_DEFAULT_VALUE_CHANGE, + 'Type1.field1 arg name has changed defaultValue'), + (DangerousChangeType.VALUE_ADDED_TO_ENUM, + 'VALUE2 was added to enum type EnumType1.'), + (DangerousChangeType.INTERFACE_ADDED_TO_OBJECT, + 'Interface1 added to interfaces implemented' + ' by TypeThatGainsInterface1.'), + (DangerousChangeType.TYPE_ADDED_TO_UNION, + 'TypeInUnion2 was added to union type UnionTypeThatGainsAType.')] + + assert find_dangerous_changes( + old_schema, new_schema) == expected_dangerous_changes + + def should_detect_if_a_nullable_field_argument_was_added(): + old_schema = build_schema(""" + type Type1 { + field1(arg1: String): String + } + + type Query { + field1: String + } + """) + + new_schema = build_schema(""" + type Type1 { + field1(arg1: String, arg2: String): String + } + + type Query { + field1: String + } + """) + + assert find_arg_changes(old_schema, new_schema).dangerous_changes == [ + (DangerousChangeType.NULLABLE_ARG_ADDED, + 'A nullable arg arg2 on Type1.field1 was added')] diff --git a/tests/utilities/test_find_deprecated_usages.py b/tests/utilities/test_find_deprecated_usages.py new file mode 100644 index 00000000..8e203b34 --- /dev/null +++ b/tests/utilities/test_find_deprecated_usages.py @@ -0,0 +1,43 @@ +from graphql.language import parse +from graphql.type import ( + GraphQLEnumType, GraphQLEnumValue, GraphQLSchema, GraphQLObjectType, + 
GraphQLField, GraphQLString, GraphQLArgument) +from graphql.utilities import find_deprecated_usages + + +def describe_find_deprecated_usages(): + + enum_type = GraphQLEnumType('EnumType', { + 'ONE': GraphQLEnumValue(), + 'TWO': GraphQLEnumValue(deprecation_reason='Some enum reason.')}) + + schema = GraphQLSchema(GraphQLObjectType('Query', { + 'normalField': GraphQLField(GraphQLString, args={ + 'enumArg': GraphQLArgument(enum_type)}), + 'deprecatedField': GraphQLField( + GraphQLString, deprecation_reason='Some field reason.')})) + + def should_report_empty_set_for_no_deprecated_usages(): + errors = find_deprecated_usages( + schema, parse('{ normalField(enumArg: ONE) }')) + + assert errors == [] + + def should_report_usage_of_deprecated_fields(): + errors = find_deprecated_usages( + schema, parse('{ normalField, deprecatedField }')) + + error_messages = [err.message for err in errors] + + assert error_messages == [ + 'The field Query.deprecatedField is deprecated.' + ' Some field reason.'] + + def should_report_usage_of_deprecated_enums(): + errors = find_deprecated_usages( + schema, parse('{ normalField(enumArg: TWO) }')) + + error_messages = [err.message for err in errors] + + assert error_messages == [ + 'The enum value EnumType.TWO is deprecated. Some enum reason.'] diff --git a/tests/utilities/test_get_operation_ast.py b/tests/utilities/test_get_operation_ast.py new file mode 100644 index 00000000..7c50db4c --- /dev/null +++ b/tests/utilities/test_get_operation_ast.py @@ -0,0 +1,55 @@ +from graphql.language import parse +from graphql.utilities import get_operation_ast + + +def describe_get_operation_ast(): + + def gets_an_operation_from_a_simple_document(): + doc = parse('{ field }') + assert get_operation_ast(doc) == doc.definitions[0] + + def gets_an_operation_from_a_document_with_named_op_mutation(): + doc = parse('mutation Test { field }') + assert get_operation_ast(doc) == doc.definitions[0] + + def gets_an_operation_from_a_document_with_named_op_subscription(): + doc = parse('subscription Test { field }') + assert get_operation_ast(doc) == doc.definitions[0] + + def does_not_get_missing_operation(): + doc = parse('type Foo { field: String }') + assert get_operation_ast(doc) is None + + def does_not_get_ambiguous_unnamed_operation(): + doc = parse(""" + { field } + mutation Test { field } + subscription TestSub { field } + """) + assert get_operation_ast(doc) is None + + def does_not_get_ambiguous_named_operation(): + doc = parse(""" + query TestQ { field } + mutation TestM { field } + subscription TestS { field } + """) + assert get_operation_ast(doc) is None + + def does_not_get_misnamed_operation(): + doc = parse(""" + query TestQ { field } + mutation TestM { field } + subscription TestS { field } + """) + assert get_operation_ast(doc, 'Unknown') is None + + def gets_named_operation(): + doc = parse(""" + query TestQ { field } + mutation TestM { field } + subscription TestS { field } + """) + assert get_operation_ast(doc, 'TestQ') == doc.definitions[0] + assert get_operation_ast(doc, 'TestM') == doc.definitions[1] + assert get_operation_ast(doc, 'TestS') == doc.definitions[2] diff --git a/tests/utilities/test_get_operation_root_type.py b/tests/utilities/test_get_operation_root_type.py new file mode 100644 index 00000000..98e7b15f --- /dev/null +++ b/tests/utilities/test_get_operation_root_type.py @@ -0,0 +1,111 @@ +from pytest import raises + +from graphql.error import GraphQLError +from graphql.language import ( + parse, OperationDefinitionNode, OperationTypeDefinitionNode, + 
SchemaDefinitionNode) +from graphql.type import ( + GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString) +from graphql.utilities import get_operation_root_type + + +query_type = GraphQLObjectType('FooQuery', { + 'field': GraphQLField(GraphQLString)}) + +mutation_type = GraphQLObjectType('FooMutation', { + 'field': GraphQLField(GraphQLString)}) + +subscription_type = GraphQLObjectType('FooSubscription', { + 'field': GraphQLField(GraphQLString)}) + + +def describe_get_operation_root_type(): + + def gets_a_query_type_for_an_unnamed_operation_definition_node(): + test_schema = GraphQLSchema(query_type) + doc = parse('{ field }') + operation = doc.definitions[0] + assert isinstance(operation, OperationDefinitionNode) + assert get_operation_root_type(test_schema, operation) is query_type + + def gets_a_query_type_for_a_named_operation_definition_node(): + test_schema = GraphQLSchema(query_type) + doc = parse('query Q { field }') + operation = doc.definitions[0] + assert isinstance(operation, OperationDefinitionNode) + assert get_operation_root_type(test_schema, operation) is query_type + + def gets_a_type_for_operation_definition_nodes(): + test_schema = GraphQLSchema( + query_type, mutation_type, subscription_type) + doc = parse('schema { query: FooQuery' + ' mutation: FooMutation subscription: FooSubscription }') + schema = doc.definitions[0] + assert isinstance(schema, SchemaDefinitionNode) + operations = schema.operation_types + operation = operations[0] + assert isinstance(operation, OperationTypeDefinitionNode) + assert get_operation_root_type(test_schema, operation) is query_type + operation = operations[1] + assert isinstance(operation, OperationTypeDefinitionNode) + assert get_operation_root_type(test_schema, operation) is mutation_type + operation = operations[2] + assert isinstance(operation, OperationTypeDefinitionNode) + assert get_operation_root_type( + test_schema, operation) is subscription_type + + def gets_a_mutation_type_for_an_operation_definition_node(): + test_schema = GraphQLSchema(mutation=mutation_type) + doc = parse('mutation { field }') + operation = doc.definitions[0] + assert isinstance(operation, OperationDefinitionNode) + assert get_operation_root_type(test_schema, operation) is mutation_type + + def gets_a_subscription_type_for_an_operation_definition_node(): + test_schema = GraphQLSchema(subscription=subscription_type) + doc = parse('subscription { field }') + operation = doc.definitions[0] + assert isinstance(operation, OperationDefinitionNode) + assert get_operation_root_type( + test_schema, operation) is subscription_type + + def throws_when_query_type_not_defined_in_schema(): + test_schema = GraphQLSchema() + doc = parse('query { field }') + operation = doc.definitions[0] + assert isinstance(operation, OperationDefinitionNode) + with raises(GraphQLError) as exc_info: + get_operation_root_type(test_schema, operation) + assert exc_info.value.message == ( + 'Schema does not define the required query root type.') + + def throws_when_mutation_type_not_defined_in_schema(): + test_schema = GraphQLSchema() + doc = parse('mutation { field }') + operation = doc.definitions[0] + assert isinstance(operation, OperationDefinitionNode) + with raises(GraphQLError) as exc_info: + get_operation_root_type(test_schema, operation) + assert exc_info.value.message == ( + 'Schema is not configured for mutations.') + + def throws_when_subscription_type_not_defined_in_schema(): + test_schema = GraphQLSchema() + doc = parse('subscription { field }') + operation = 
doc.definitions[0] + assert isinstance(operation, OperationDefinitionNode) + with raises(GraphQLError) as exc_info: + get_operation_root_type(test_schema, operation) + assert exc_info.value.message == ( + 'Schema is not configured for subscriptions.') + + def throws_when_operation_not_a_valid_operation_kind(): + test_schema = GraphQLSchema() + doc = parse('{ field }') + operation = doc.definitions[0] + assert isinstance(operation, OperationDefinitionNode) + operation.operation = 'non_existent_operation' + with raises(GraphQLError) as exc_info: + get_operation_root_type(test_schema, operation) + assert exc_info.value.message == ( + 'Can only have query, mutation and subscription operations.') diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py new file mode 100644 index 00000000..6cfd0aa1 --- /dev/null +++ b/tests/utilities/test_introspection_from_schema.py @@ -0,0 +1,45 @@ +from graphql.pyutils import dedent +from graphql.type import ( + GraphQLSchema, GraphQLObjectType, GraphQLField, GraphQLString) +from graphql.utilities import ( + build_client_schema, print_schema, introspection_from_schema) + + +def introspection_to_sdl(introspection): + return print_schema(build_client_schema(introspection)) + + +def describe_introspection_from_schema(): + + schema = GraphQLSchema(GraphQLObjectType('Simple', { + 'string': GraphQLField( + GraphQLString, description='This is a string field')}, + description='This is a simple type')) + + def converts_a_simple_schema(): + introspection = introspection_from_schema(schema) + + assert introspection_to_sdl(introspection) == dedent(''' + schema { + query: Simple + } + + """This is a simple type""" + type Simple { + """This is a string field""" + string: String + } + ''') + + def converts_a_simple_schema_without_description(): + introspection = introspection_from_schema(schema, descriptions=False) + + assert introspection_to_sdl(introspection) == dedent(""" + schema { + query: Simple + } + + type Simple { + string: String + } + """) diff --git a/tests/utilities/test_lexicographic_sort_schema.py b/tests/utilities/test_lexicographic_sort_schema.py new file mode 100644 index 00000000..d42d77b9 --- /dev/null +++ b/tests/utilities/test_lexicographic_sort_schema.py @@ -0,0 +1,345 @@ +from graphql.pyutils import dedent +from graphql.utilities import ( + build_schema, print_schema, lexicographic_sort_schema) + + +def sort_sdl(sdl): + schema = build_schema(sdl) + return print_schema(lexicographic_sort_schema(schema)) + + +def describe_lexicographic_sort_schema(): + + def sort_fields(): + sorted_sdl = sort_sdl(dedent(""" + input Bar { + barB: String + barA: String + barC: String + } + + interface FooInterface { + fooB: String + fooA: String + fooC: String + } + + type FooType implements FooInterface { + fooC: String + fooA: String + fooB: String + } + + type Query { + dummy(arg: Bar): FooType + } + """)) + + assert sorted_sdl == dedent(""" + input Bar { + barA: String + barB: String + barC: String + } + + interface FooInterface { + fooA: String + fooB: String + fooC: String + } + + type FooType implements FooInterface { + fooA: String + fooB: String + fooC: String + } + + type Query { + dummy(arg: Bar): FooType + } + """) + + def sort_implemented_interfaces(): + sorted_sdl = sort_sdl(dedent(""" + interface FooA { + dummy: String + } + + interface FooB { + dummy: String + } + + interface FooC { + dummy: String + } + + type Query implements FooB & FooA & FooC { + dummy: String + } + """)) + + assert sorted_sdl 
== dedent(""" + interface FooA { + dummy: String + } + + interface FooB { + dummy: String + } + + interface FooC { + dummy: String + } + + type Query implements FooA & FooB & FooC { + dummy: String + } + """) + + def sort_types_in_union(): + sorted_sdl = sort_sdl(dedent(""" + type FooA { + dummy: String + } + + type FooB { + dummy: String + } + + type FooC { + dummy: String + } + + union FooUnion = FooB | FooA | FooC + + type Query { + dummy: FooUnion + } + """)) + + assert sorted_sdl == dedent(""" + type FooA { + dummy: String + } + + type FooB { + dummy: String + } + + type FooC { + dummy: String + } + + union FooUnion = FooA | FooB | FooC + + type Query { + dummy: FooUnion + } + """) + + def sort_enum_types(): + sorted_sdl = sort_sdl(dedent(""" + enum Foo { + B + C + A + } + + type Query { + dummy: Foo + } + """)) + + assert sorted_sdl == dedent(""" + enum Foo { + A + B + C + } + + type Query { + dummy: Foo + } + """) + + def sort_field_arguments(): + sorted_sdl = sort_sdl(dedent(""" + type Query { + dummy(argB: Int, argA: String, argC: Float): ID + } + """)) + + assert sorted_sdl == dedent(""" + type Query { + dummy(argA: String, argB: Int, argC: Float): ID + } + """) + + def sort_types(): + sorted_sdl = sort_sdl(dedent(""" + type Query { + dummy(arg1: FooF, arg2: FooA, arg3: FooG): FooD + } + + type FooC implements FooE { + dummy: String + } + + enum FooG { + enumValue + } + + scalar FooA + + input FooF { + dummy: String + } + + union FooD = FooC | FooB + + interface FooE { + dummy: String + } + + type FooB { + dummy: String + } + """)) + + assert sorted_sdl == dedent(""" + scalar FooA + + type FooB { + dummy: String + } + + type FooC implements FooE { + dummy: String + } + + union FooD = FooB | FooC + + interface FooE { + dummy: String + } + + input FooF { + dummy: String + } + + enum FooG { + enumValue + } + + type Query { + dummy(arg1: FooF, arg2: FooA, arg3: FooG): FooD + } + """) + + def sort_directive_arguments(): + sorted_sdl = sort_sdl(dedent(""" + directive @test(argC: Float, argA: String, argB: Int) on FIELD + + type Query { + dummy: String + } + """)) + + assert sorted_sdl == dedent(""" + directive @test(argA: String, argB: Int, argC: Float) on FIELD + + type Query { + dummy: String + } + """) + + def sort_directive_locations(): + sorted_sdl = sort_sdl(dedent(""" + directive @test(argC: Float, argA: String, argB: Int) on UNION | FIELD | ENUM + + type Query { + dummy: String + } + """)) # noqa + + assert sorted_sdl == dedent(""" + directive @test(argA: String, argB: Int, argC: Float) on ENUM | FIELD | UNION + + type Query { + dummy: String + } + """) # noqa + + def sort_directives(): + sorted_sdl = sort_sdl(dedent(""" + directive @fooC on FIELD + + directive @fooB on UNION + + directive @fooA on ENUM + + type Query { + dummy: String + } + """)) + + assert sorted_sdl == dedent(""" + directive @fooA on ENUM + + directive @fooB on UNION + + directive @fooC on FIELD + + type Query { + dummy: String + } + """) + + def sort_recursive_types(): + sorted_sdl = sort_sdl(dedent(""" + interface FooC { + fooB: FooB + fooA: FooA + fooC: FooC + } + + type FooB implements FooC { + fooB: FooB + fooA: FooA + } + + type FooA implements FooC { + fooB: FooB + fooA: FooA + } + + type Query { + fooC: FooC + fooB: FooB + fooA: FooA + } + """)) + + assert sorted_sdl == dedent(""" + type FooA implements FooC { + fooA: FooA + fooB: FooB + } + + type FooB implements FooC { + fooA: FooA + fooB: FooB + } + + interface FooC { + fooA: FooA + fooB: FooB + fooC: FooC + } + + type Query { + fooA: FooA + fooB: 
FooB + fooC: FooC + } + """) diff --git a/tests/utilities/test_schema_printer.py b/tests/utilities/test_schema_printer.py new file mode 100644 index 00000000..11fb22fc --- /dev/null +++ b/tests/utilities/test_schema_printer.py @@ -0,0 +1,734 @@ +from graphql.language import DirectiveLocation +from graphql.pyutils import dedent +from graphql.type import ( + GraphQLArgument, GraphQLBoolean, GraphQLEnumType, GraphQLEnumValue, + GraphQLField, GraphQLInputObjectType, GraphQLInt, GraphQLInterfaceType, + GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, + GraphQLSchema, GraphQLString, GraphQLUnionType, GraphQLType, + GraphQLNullableType, GraphQLInputField, GraphQLDirective) +from graphql.utilities import ( + build_schema, print_schema, print_introspection_schema) + + +def print_for_test(schema: GraphQLSchema) -> str: + schema_text = print_schema(schema) + # keep print_schema and build_schema in sync + assert print_schema(build_schema(schema_text)) == schema_text + return schema_text + + +def print_single_field_schema(field: GraphQLField): + Query = GraphQLObjectType( + name='Query', fields={'singleField': field}) + return print_for_test(GraphQLSchema(query=Query)) + + +def list_of(type_: GraphQLType): + return GraphQLList(type_) + + +def non_null(type_: GraphQLNullableType): + return GraphQLNonNull(type_) + + +def describe_type_system_printer(): + + def prints_string_field(): + output = print_single_field_schema(GraphQLField(GraphQLString)) + assert output == dedent(""" + type Query { + singleField: String + } + """) + + def prints_list_of_string_field(): + output = print_single_field_schema( + GraphQLField(list_of(GraphQLString))) + assert output == dedent(""" + type Query { + singleField: [String] + } + """) + + def prints_non_null_string_field(): + output = print_single_field_schema( + GraphQLField(non_null(GraphQLString))) + assert output == dedent(""" + type Query { + singleField: String! + } + """) + + def prints_non_null_list_of_string_field(): + output = print_single_field_schema( + GraphQLField(non_null(list_of(GraphQLString)))) + assert output == dedent(""" + type Query { + singleField: [String]! + } + """) + + def prints_list_of_non_null_string_field(): + output = print_single_field_schema( + GraphQLField((list_of(non_null(GraphQLString))))) + assert output == dedent(""" + type Query { + singleField: [String!] + } + """) + + def prints_non_null_list_of_non_null_string_field(): + output = print_single_field_schema(GraphQLField( + non_null(list_of(non_null(GraphQLString))))) + assert output == dedent(""" + type Query { + singleField: [String!]! 
+ } + """) + + def prints_object_field(): + FooType = GraphQLObjectType( + name='Foo', fields={'str': GraphQLField(GraphQLString)}) + + Query = GraphQLObjectType( + name='Query', fields={'foo': GraphQLField(FooType)}) + + Schema = GraphQLSchema(query=Query) + output = print_for_test(Schema) + assert output == dedent(""" + type Foo { + str: String + } + + type Query { + foo: Foo + } + """) + + def prints_string_field_with_int_arg(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={'argOne': GraphQLArgument(GraphQLInt)})) + assert output == dedent(""" + type Query { + singleField(argOne: Int): String + } + """) + + def prints_string_field_with_int_arg_with_default(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={'argOne': GraphQLArgument(GraphQLInt, default_value=2)})) + assert output == dedent(""" + type Query { + singleField(argOne: Int = 2): String + } + """) + + def prints_string_field_with_string_arg_with_default(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={'argOne': GraphQLArgument( + GraphQLString, default_value='tes\t de\fault')})) + assert output == dedent(r""" + type Query { + singleField(argOne: String = "tes\t de\fault"): String + } + """) + + def prints_string_field_with_int_arg_with_default_null(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={'argOne': GraphQLArgument(GraphQLInt, default_value=None)})) + assert output == dedent(""" + type Query { + singleField(argOne: Int = null): String + } + """) + + def prints_string_field_with_non_null_int_arg(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={'argOne': GraphQLArgument(non_null(GraphQLInt))})) + assert output == dedent(""" + type Query { + singleField(argOne: Int!): String + } + """) + + def prints_string_field_with_multiple_args(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={ + 'argOne': GraphQLArgument(GraphQLInt), + 'argTwo': GraphQLArgument(GraphQLString)})) + assert output == dedent(""" + type Query { + singleField(argOne: Int, argTwo: String): String + } + """) + + def prints_string_field_with_multiple_args_first_is_default(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={ + 'argOne': GraphQLArgument(GraphQLInt, default_value=1), + 'argTwo': GraphQLArgument(GraphQLString), + 'argThree': GraphQLArgument(GraphQLBoolean)})) + assert output == dedent(""" + type Query { + singleField(argOne: Int = 1, argTwo: String, argThree: Boolean): String + } + """) # noqa + + def prints_string_field_with_multiple_args_second_is_default(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={ + 'argOne': GraphQLArgument(GraphQLInt), + 'argTwo': GraphQLArgument(GraphQLString, default_value="foo"), + 'argThree': GraphQLArgument(GraphQLBoolean)})) + assert output == dedent(""" + type Query { + singleField(argOne: Int, argTwo: String = "foo", argThree: Boolean): String + } + """) # noqa + + def prints_string_field_with_multiple_args_last_is_default(): + output = print_single_field_schema(GraphQLField( + type_=GraphQLString, + args={ + 'argOne': GraphQLArgument(GraphQLInt), + 'argTwo': GraphQLArgument(GraphQLString), + 'argThree': + GraphQLArgument(GraphQLBoolean, default_value=False)})) + assert output == dedent(""" + type Query { + singleField(argOne: Int, argTwo: String, argThree: Boolean = false): String + } + """) # noqa + + def 
prints_custom_query_root_type(): + CustomQueryType = GraphQLObjectType( + 'CustomQueryType', {'bar': GraphQLField(GraphQLString)}) + + Schema = GraphQLSchema(CustomQueryType) + + output = print_for_test(Schema) + assert output == dedent(""" + schema { + query: CustomQueryType + } + + type CustomQueryType { + bar: String + } + """) + + def prints_interface(): + FooType = GraphQLInterfaceType( + name='Foo', + fields={'str': GraphQLField(GraphQLString)}) + + BarType = GraphQLObjectType( + name='Bar', + fields={'str': GraphQLField(GraphQLString)}, + interfaces=[FooType]) + + Root = GraphQLObjectType( + name='Root', + fields={'bar': GraphQLField(BarType)}) + + Schema = GraphQLSchema(Root, types=[BarType]) + output = print_for_test(Schema) + assert output == dedent(""" + schema { + query: Root + } + + type Bar implements Foo { + str: String + } + + interface Foo { + str: String + } + + type Root { + bar: Bar + } + """) + + def prints_multiple_interfaces(): + FooType = GraphQLInterfaceType( + name='Foo', + fields={'str': GraphQLField(GraphQLString)}) + + BaazType = GraphQLInterfaceType( + name='Baaz', + fields={'int': GraphQLField(GraphQLInt)}) + + BarType = GraphQLObjectType( + name='Bar', + fields={ + 'str': GraphQLField(GraphQLString), + 'int': GraphQLField(GraphQLInt)}, + interfaces=[FooType, BaazType]) + + Root = GraphQLObjectType( + name='Root', + fields={'bar': GraphQLField(BarType)}) + + Schema = GraphQLSchema(Root, types=[BarType]) + output = print_for_test(Schema) + assert output == dedent(""" + schema { + query: Root + } + + interface Baaz { + int: Int + } + + type Bar implements Foo & Baaz { + str: String + int: Int + } + + interface Foo { + str: String + } + + type Root { + bar: Bar + } + """) + + def prints_unions(): + FooType = GraphQLObjectType( + name='Foo', + fields={'bool': GraphQLField(GraphQLBoolean)}) + + BarType = GraphQLObjectType( + name='Bar', + fields={'str': GraphQLField(GraphQLString)}) + + SingleUnion = GraphQLUnionType( + name='SingleUnion', + types=[FooType]) + + MultipleUnion = GraphQLUnionType( + name='MultipleUnion', + types=[FooType, BarType]) + + Root = GraphQLObjectType( + name='Root', + fields={ + 'single': GraphQLField(SingleUnion), + 'multiple': GraphQLField(MultipleUnion)}) + + Schema = GraphQLSchema(Root) + output = print_for_test(Schema) + assert output == dedent(""" + schema { + query: Root + } + + type Bar { + str: String + } + + type Foo { + bool: Boolean + } + + union MultipleUnion = Foo | Bar + + type Root { + single: SingleUnion + multiple: MultipleUnion + } + + union SingleUnion = Foo + """) + + def prints_input_type(): + InputType = GraphQLInputObjectType( + name='InputType', + fields={'int': GraphQLInputField(GraphQLInt)}) + + Root = GraphQLObjectType( + name='Root', + fields={'str': GraphQLField( + GraphQLString, args={'argOne': GraphQLArgument(InputType)})}) + + Schema = GraphQLSchema(Root) + output = print_for_test(Schema) + assert output == dedent(""" + schema { + query: Root + } + + input InputType { + int: Int + } + + type Root { + str(argOne: InputType): String + } + """) + + def prints_custom_scalar(): + OddType = GraphQLScalarType( + name='Odd', + serialize=lambda value: value if value % 2 else None) + + Root = GraphQLObjectType( + name='Root', + fields={'odd': GraphQLField(OddType)}) + + Schema = GraphQLSchema(Root) + output = print_for_test(Schema) + assert output == dedent(""" + schema { + query: Root + } + + scalar Odd + + type Root { + odd: Odd + } + """) + + def prints_enum(): + RGBType = GraphQLEnumType( + name='RGB', + values={ 
+ 'RED': GraphQLEnumValue(0), + 'GREEN': GraphQLEnumValue(1), + 'BLUE': GraphQLEnumValue(2)}) + + Root = GraphQLObjectType( + name='Root', + fields={'rgb': GraphQLField(RGBType)}) + + Schema = GraphQLSchema(Root) + output = print_for_test(Schema) + assert output == dedent(""" + schema { + query: Root + } + + enum RGB { + RED + GREEN + BLUE + } + + type Root { + rgb: RGB + } + """) + + def prints_custom_directives(): + Query = GraphQLObjectType( + name='Query', + fields={'field': GraphQLField(GraphQLString)}) + + CustomDirective = GraphQLDirective( + name='customDirective', + locations=[DirectiveLocation.FIELD]) + + Schema = GraphQLSchema( + query=Query, + directives=[CustomDirective]) + output = print_for_test(Schema) + assert output == dedent(""" + directive @customDirective on FIELD + + type Query { + field: String + } + """) + + def one_line_prints_a_short_description(): + description = 'This field is awesome' + output = print_single_field_schema(GraphQLField( + GraphQLString, description=description)) + assert output == dedent(''' + type Query { + """This field is awesome""" + singleField: String + } + ''') + recreated_root = build_schema(output).type_map['Query'] + recreated_field = recreated_root.fields['singleField'] + assert recreated_field.description == description + + def does_not_one_line_print_a_description_that_ends_with_a_quote(): + description = 'This field is "awesome"' + output = print_single_field_schema(GraphQLField( + GraphQLString, description=description)) + assert output == dedent(''' + type Query { + """ + This field is "awesome" + """ + singleField: String + } + ''') + recreated_root = build_schema(output).type_map['Query'] + recreated_field = recreated_root.fields['singleField'] + assert recreated_field.description == description + + def preserves_leading_spaces_when_printing_a_description(): + description = ' This field is "awesome"' + output = print_single_field_schema(GraphQLField( + GraphQLString, description=description)) + assert output == dedent(''' + type Query { + """ This field is "awesome" + """ + singleField: String + } + ''') + recreated_root = build_schema(output).type_map['Query'] + recreated_field = recreated_root.fields['singleField'] + assert recreated_field.description == description + + def prints_introspection_schema(): + Root = GraphQLObjectType( + name='Root', + fields={'onlyField': GraphQLField(GraphQLString)}) + + Schema = GraphQLSchema(Root) + output = print_introspection_schema(Schema) + assert output == dedent(''' + schema { + query: Root + } + + """ + Directs the executor to include this field or fragment only when the `if` argument is true. + """ + directive @include( + """Included when true.""" + if: Boolean! + ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + + """ + Directs the executor to skip this field or fragment when the `if` argument is true. + """ + directive @skip( + """Skipped when true.""" + if: Boolean! + ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + + """Marks an element of a GraphQL schema as no longer supported.""" + directive @deprecated( + """ + Explains why this element was deprecated, usually also including a suggestion + for how to access supported similar data. Formatted in + [Markdown](https://daringfireball.net/projects/markdown/). + """ + reason: String = "No longer supported" + ) on FIELD_DEFINITION | ENUM_VALUE + + """ + A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document. 
+ + In some cases, you need to provide options to alter GraphQL's execution behavior + in ways field arguments will not suffice, such as conditionally including or + skipping a field. Directives provide this by describing additional information + to the executor. + """ + type __Directive { + name: String! + description: String + locations: [__DirectiveLocation!]! + args: [__InputValue!]! + } + + """ + A Directive can be adjacent to many parts of the GraphQL language, a + __DirectiveLocation describes one such possible adjacencies. + """ + enum __DirectiveLocation { + """Location adjacent to a query operation.""" + QUERY + + """Location adjacent to a mutation operation.""" + MUTATION + + """Location adjacent to a subscription operation.""" + SUBSCRIPTION + + """Location adjacent to a field.""" + FIELD + + """Location adjacent to a fragment definition.""" + FRAGMENT_DEFINITION + + """Location adjacent to a fragment spread.""" + FRAGMENT_SPREAD + + """Location adjacent to an inline fragment.""" + INLINE_FRAGMENT + + """Location adjacent to a schema definition.""" + SCHEMA + + """Location adjacent to a scalar definition.""" + SCALAR + + """Location adjacent to an object type definition.""" + OBJECT + + """Location adjacent to a field definition.""" + FIELD_DEFINITION + + """Location adjacent to an argument definition.""" + ARGUMENT_DEFINITION + + """Location adjacent to an interface definition.""" + INTERFACE + + """Location adjacent to a union definition.""" + UNION + + """Location adjacent to an enum definition.""" + ENUM + + """Location adjacent to an enum value definition.""" + ENUM_VALUE + + """Location adjacent to an input object type definition.""" + INPUT_OBJECT + + """Location adjacent to an input object field definition.""" + INPUT_FIELD_DEFINITION + } + + """ + One possible value for a given Enum. Enum values are unique values, not a + placeholder for a string or numeric value. However an Enum value is returned in + a JSON response as a string. + """ + type __EnumValue { + name: String! + description: String + isDeprecated: Boolean! + deprecationReason: String + } + + """ + Object and Interface types are described by a list of Fields, each of which has + a name, potentially a list of arguments, and a return type. + """ + type __Field { + name: String! + description: String + args: [__InputValue!]! + type: __Type! + isDeprecated: Boolean! + deprecationReason: String + } + + """ + Arguments provided to Fields or Directives and the input fields of an + InputObject are represented as Input Values which describe their type and + optionally a default value. + """ + type __InputValue { + name: String! + description: String + type: __Type! + + """ + A GraphQL-formatted string representing the default value for this input value. + """ + defaultValue: String + } + + """ + A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all + available types and directives on the server, as well as the entry points for + query, mutation, and subscription operations. + """ + type __Schema { + """A list of all types supported by this server.""" + types: [__Type!]! + + """The type that query operations will be rooted at.""" + queryType: __Type! + + """ + If this server supports mutation, the type that mutation operations will be rooted at. + """ + mutationType: __Type + + """ + If this server support subscription, the type that subscription operations will be rooted at. 
+ """ + subscriptionType: __Type + + """A list of all directives supported by this server.""" + directives: [__Directive!]! + } + + """ + The fundamental unit of any GraphQL Schema is the type. There are many kinds of + types in GraphQL as represented by the `__TypeKind` enum. + + Depending on the kind of a type, certain fields describe information about that + type. Scalar types provide no information beyond a name and description, while + Enum types provide their values. Object and Interface types provide the fields + they describe. Abstract types, Union and Interface, provide the Object types + possible at runtime. List and NonNull types compose other types. + """ + type __Type { + kind: __TypeKind! + name: String + description: String + fields(includeDeprecated: Boolean = false): [__Field!] + interfaces: [__Type!] + possibleTypes: [__Type!] + enumValues(includeDeprecated: Boolean = false): [__EnumValue!] + inputFields: [__InputValue!] + ofType: __Type + } + + """An enum describing what kind of type a given `__Type` is.""" + enum __TypeKind { + """Indicates this type is a scalar.""" + SCALAR + + """ + Indicates this type is an object. `fields` and `interfaces` are valid fields. + """ + OBJECT + + """ + Indicates this type is an interface. `fields` and `possibleTypes` are valid fields. + """ + INTERFACE + + """Indicates this type is a union. `possibleTypes` is a valid field.""" + UNION + + """Indicates this type is an enum. `enumValues` is a valid field.""" + ENUM + + """ + Indicates this type is an input object. `inputFields` is a valid field. + """ + INPUT_OBJECT + + """Indicates this type is a list. `ofType` is a valid field.""" + LIST + + """Indicates this type is a non-null. `ofType` is a valid field.""" + NON_NULL + } + ''') # noqa diff --git a/tests/utilities/test_separate_operations.py b/tests/utilities/test_separate_operations.py new file mode 100644 index 00000000..dc0449a1 --- /dev/null +++ b/tests/utilities/test_separate_operations.py @@ -0,0 +1,158 @@ +from graphql.language import parse, print_ast +from graphql.pyutils import dedent +from graphql.utilities import separate_operations + + +def describe_separate_operations(): + + def separates_one_ast_into_multiple_maintaining_document_order(): + ast = parse(""" + { + ...Y + ...X + } + + query One { + foo + bar + ...A + ...X + } + + fragment A on T { + field + ...B + } + + fragment X on T { + fieldX + } + + query Two { + ...A + ...Y + baz + } + + fragment Y on T { + fieldY + } + + fragment B on T { + something + } + + """) + + separated_asts = separate_operations(ast) + + assert list(separated_asts) == ['', 'One', 'Two'] + + assert print_ast(separated_asts['']) == dedent(""" + { + ...Y + ...X + } + + fragment X on T { + fieldX + } + + fragment Y on T { + fieldY + } + """) + + assert print_ast(separated_asts['One']) == dedent(""" + query One { + foo + bar + ...A + ...X + } + + fragment A on T { + field + ...B + } + + fragment X on T { + fieldX + } + + fragment B on T { + something + } + """) + + assert print_ast(separated_asts['Two']) == dedent(""" + fragment A on T { + field + ...B + } + + query Two { + ...A + ...Y + baz + } + + fragment Y on T { + fieldY + } + + fragment B on T { + something + } + """) + + def survives_circular_dependencies(): + ast = parse(""" + query One { + ...A + } + + fragment A on T { + ...B + } + + fragment B on T { + ...A + } + + query Two { + ...B + } + """) + + separated_asts = separate_operations(ast) + + assert list(separated_asts) == ['One', 'Two'] + + assert print_ast(separated_asts['One']) == 
dedent(""" + query One { + ...A + } + + fragment A on T { + ...B + } + + fragment B on T { + ...A + } + """) + + assert print_ast(separated_asts['Two']) == dedent(""" + fragment A on T { + ...B + } + + fragment B on T { + ...A + } + + query Two { + ...B + } + """) diff --git a/tests/utilities/test_type_comparators.py b/tests/utilities/test_type_comparators.py new file mode 100644 index 00000000..608f6f19 --- /dev/null +++ b/tests/utilities/test_type_comparators.py @@ -0,0 +1,82 @@ +from pytest import fixture + +from graphql.type import ( + GraphQLField, GraphQLFloat, GraphQLInt, GraphQLInterfaceType, GraphQLList, + GraphQLNonNull, GraphQLObjectType, GraphQLOutputType, GraphQLSchema, + GraphQLString, GraphQLUnionType) +from graphql.utilities import is_equal_type, is_type_sub_type_of + + +def describe_type_comparators(): + + def describe_is_equal_type(): + + def same_references_are_equal(): + assert is_equal_type(GraphQLString, GraphQLString) is True + + def int_and_float_are_not_equal(): + assert is_equal_type(GraphQLInt, GraphQLFloat) is False + + def lists_of_same_type_are_equal(): + assert is_equal_type( + GraphQLList(GraphQLInt), GraphQLList(GraphQLInt)) is True + + def lists_is_not_equal_to_item(): + assert is_equal_type(GraphQLList(GraphQLInt), GraphQLInt) is False + + def nonnull_of_same_type_are_equal(): + assert is_equal_type( + GraphQLNonNull(GraphQLInt), GraphQLNonNull(GraphQLInt)) is True + + def nonnull_is_not_equal_to_nullable(): + assert is_equal_type( + GraphQLNonNull(GraphQLInt), GraphQLInt) is False + + def describe_is_type_sub_type_of(): + + @fixture + def test_schema(field_type: GraphQLOutputType=GraphQLString): + return GraphQLSchema( + query=GraphQLObjectType('Query', { + 'field': GraphQLField(field_type)})) + + def same_reference_is_subtype(): + assert is_type_sub_type_of( + test_schema(), GraphQLString, GraphQLString) is True + + def int_is_not_subtype_of_float(): + assert is_type_sub_type_of( + test_schema(), GraphQLInt, GraphQLFloat) is False + + def non_null_is_subtype_of_nullable(): + assert is_type_sub_type_of( + test_schema(), GraphQLNonNull(GraphQLInt), GraphQLInt) is True + + def nullable_is_not_subtype_of_non_null(): + assert is_type_sub_type_of( + test_schema(), GraphQLInt, GraphQLNonNull(GraphQLInt)) is False + + def item_is_not_subtype_of_list(): + assert not is_type_sub_type_of( + test_schema(), GraphQLInt, GraphQLList(GraphQLInt)) + + def list_is_not_subtype_of_item(): + assert not is_type_sub_type_of( + test_schema(), GraphQLList(GraphQLInt), GraphQLInt) + + def member_is_subtype_of_union(): + member = GraphQLObjectType('Object', { + 'field': GraphQLField(GraphQLString)}) + union = GraphQLUnionType('Union', [member]) + schema = test_schema(union) + assert is_type_sub_type_of(schema, member, union) + + def implementation_is_subtype_of_interface(): + iface = GraphQLInterfaceType('Interface', { + 'field': GraphQLField(GraphQLString)}) + impl = GraphQLObjectType( + 'Object', + fields={'field': GraphQLField(GraphQLString)}, + interfaces=[iface]) + schema = test_schema(impl) + assert is_type_sub_type_of(schema, impl, iface) diff --git a/tests/utilities/test_value_from_ast.py b/tests/utilities/test_value_from_ast.py new file mode 100644 index 00000000..6c3f3635 --- /dev/null +++ b/tests/utilities/test_value_from_ast.py @@ -0,0 +1,172 @@ +from math import nan, isnan +from pytest import fixture + +from graphql.error import INVALID +from graphql.language import parse_value +from graphql.type import ( + GraphQLBoolean, GraphQLEnumType, GraphQLFloat, + GraphQLID, 
GraphQLInputField, GraphQLInputObjectType, GraphQLInt, + GraphQLList, GraphQLNonNull, GraphQLString) +from graphql.utilities import value_from_ast + + +def describe_value_from_ast(): + + @fixture + def test_case(type_, value_text, expected): + value_node = parse_value(value_text) + assert value_from_ast(value_node, type_) == expected + + @fixture + def test_case_expect_nan(type_, value_text): + value_node = parse_value(value_text) + assert isnan(value_from_ast(value_node, type_)) + + @fixture + def test_case_with_vars(variables, type_, value_text, expected): + value_node = parse_value(value_text) + assert value_from_ast(value_node, type_, variables) == expected + + def rejects_empty_input(): + # noinspection PyTypeChecker + assert value_from_ast(None, GraphQLBoolean) is INVALID + + def converts_according_to_input_coercion_rules(): + test_case(GraphQLBoolean, 'true', True) + test_case(GraphQLBoolean, 'false', False) + test_case(GraphQLInt, '123', 123) + test_case(GraphQLFloat, '123', 123) + test_case(GraphQLFloat, '123.456', 123.456) + test_case(GraphQLString, '"abc123"', 'abc123') + test_case(GraphQLID, '123456', '123456') + test_case(GraphQLID, '"123456"', '123456') + + def does_not_convert_when_input_coercion_rules_reject_a_value(): + test_case(GraphQLBoolean, '123', INVALID) + test_case(GraphQLInt, '123.456', INVALID) + test_case(GraphQLInt, 'true', INVALID) + test_case(GraphQLInt, '"123"', INVALID) + test_case(GraphQLFloat, '"123"', INVALID) + test_case(GraphQLString, '123', INVALID) + test_case(GraphQLString, 'true', INVALID) + test_case(GraphQLID, '123.456', INVALID) + + test_enum = GraphQLEnumType('TestColor', { + 'RED': 1, + 'GREEN': 2, + 'BLUE': 3, + 'NULL': None, + 'INVALID': INVALID, + 'NAN': nan}) + + def converts_enum_values_according_to_input_coercion_rules(): + test_case(test_enum, 'RED', 1) + test_case(test_enum, 'BLUE', 3) + test_case(test_enum, 'YELLOW', INVALID) + test_case(test_enum, '3', INVALID) + test_case(test_enum, '"BLUE"', INVALID) + test_case(test_enum, 'null', None) + test_case(test_enum, 'NULL', None) + test_case(test_enum, 'INVALID', INVALID) + # nan is not equal to itself, needs a special test case + test_case_expect_nan(test_enum, 'NAN') + + # Boolean! + non_null_bool = GraphQLNonNull(GraphQLBoolean) + # [Boolean] + list_of_bool = GraphQLList(GraphQLBoolean) + # [Boolean!] + list_of_non_null_bool = GraphQLList(non_null_bool) + # [Boolean]! + non_null_list_of_bool = GraphQLNonNull(list_of_bool) + # [Boolean!]! 
+    non_null_list_of_non_null_bool = GraphQLNonNull(list_of_non_null_bool)
+
+    def coerces_to_null_unless_non_null():
+        test_case(GraphQLBoolean, 'null', None)
+        test_case(non_null_bool, 'null', INVALID)
+
+    def coerces_lists_of_values():
+        test_case(list_of_bool, 'true', [True])
+        test_case(list_of_bool, '123', INVALID)
+        test_case(list_of_bool, 'null', None)
+        test_case(list_of_bool, '[true, false]', [True, False])
+        test_case(list_of_bool, '[true, 123]', INVALID)
+        test_case(list_of_bool, '[true, null]', [True, None])
+        test_case(list_of_bool, '{ true: true }', INVALID)
+
+    def coerces_non_null_lists_of_values():
+        test_case(non_null_list_of_bool, 'true', [True])
+        test_case(non_null_list_of_bool, '123', INVALID)
+        test_case(non_null_list_of_bool, 'null', INVALID)
+        test_case(non_null_list_of_bool, '[true, false]', [True, False])
+        test_case(non_null_list_of_bool, '[true, 123]', INVALID)
+        test_case(non_null_list_of_bool, '[true, null]', [True, None])
+
+    def coerces_lists_of_non_null_values():
+        test_case(list_of_non_null_bool, 'true', [True])
+        test_case(list_of_non_null_bool, '123', INVALID)
+        test_case(list_of_non_null_bool, 'null', None)
+        test_case(list_of_non_null_bool, '[true, false]', [True, False])
+        test_case(list_of_non_null_bool, '[true, 123]', INVALID)
+        test_case(list_of_non_null_bool, '[true, null]', INVALID)
+
+    def coerces_non_null_lists_of_non_null_values():
+        test_case(non_null_list_of_non_null_bool, 'true', [True])
+        test_case(non_null_list_of_non_null_bool, '123', INVALID)
+        test_case(non_null_list_of_non_null_bool, 'null', INVALID)
+        test_case(non_null_list_of_non_null_bool,
+                  '[true, false]', [True, False])
+        test_case(non_null_list_of_non_null_bool, '[true, 123]', INVALID)
+        test_case(non_null_list_of_non_null_bool, '[true, null]', INVALID)
+
+    test_input_obj = GraphQLInputObjectType('TestInput', {
+        'int': GraphQLInputField(GraphQLInt, default_value=42),
+        'bool': GraphQLInputField(GraphQLBoolean),
+        'requiredBool': GraphQLInputField(non_null_bool)})
+
+    def coerces_input_objects_according_to_input_coercion_rules():
+        test_case(test_input_obj, 'null', None)
+        test_case(test_input_obj, '123', INVALID)
+        test_case(test_input_obj, '[]', INVALID)
+        test_case(test_input_obj, '{ int: 123, requiredBool: false }', {
+            'int': 123,
+            'requiredBool': False,
+        })
+        test_case(test_input_obj, '{ bool: true, requiredBool: false }', {
+            'int': 42,
+            'bool': True,
+            'requiredBool': False,
+        })
+        test_case(test_input_obj,
+                  '{ int: true, requiredBool: true }', INVALID)
+        test_case(test_input_obj, '{ requiredBool: null }', INVALID)
+        test_case(test_input_obj, '{ bool: true }', INVALID)
+
+    def accepts_variable_values_assuming_already_coerced():
+        test_case_with_vars({}, GraphQLBoolean, '$var', INVALID)
+        test_case_with_vars({'var': True}, GraphQLBoolean, '$var', True)
+        test_case_with_vars({'var': None}, GraphQLBoolean, '$var', None)
+
+    def asserts_variables_are_provided_as_items_in_lists():
+        test_case_with_vars({}, list_of_bool, '[ $foo ]', [None])
+        test_case_with_vars({}, list_of_non_null_bool, '[ $foo ]', INVALID)
+        test_case_with_vars(
+            {'foo': True}, list_of_non_null_bool, '[ $foo ]', [True])
+        # Note: variables are expected to have already been coerced, so we
+        # do not expect the singleton wrapping behavior for variables.
+ test_case_with_vars( + {'foo': True}, list_of_non_null_bool, '$foo', True) + test_case_with_vars( + {'foo': [True]}, list_of_non_null_bool, '$foo', [True]) + + def omits_input_object_fields_for_unprovided_variables(): + test_case_with_vars( + {}, test_input_obj, + '{ int: $foo, bool: $foo, requiredBool: true }', + {'int': 42, 'requiredBool': True}) + test_case_with_vars( + {}, test_input_obj, '{ requiredBool: $foo }', INVALID) + test_case_with_vars( + {'foo': True}, test_input_obj, '{ requiredBool: $foo }', + {'int': 42, 'requiredBool': True}) diff --git a/tests/utilities/test_value_from_ast_untyped.py b/tests/utilities/test_value_from_ast_untyped.py new file mode 100644 index 00000000..e93dff0a --- /dev/null +++ b/tests/utilities/test_value_from_ast_untyped.py @@ -0,0 +1,49 @@ +from pytest import fixture + +from graphql.error import INVALID +from graphql.language import parse_value +from graphql.utilities import value_from_ast_untyped + + +def describe_value_from_ast_untyped(): + + @fixture + def test_case(value_text, expected): + value_node = parse_value(value_text) + assert value_from_ast_untyped(value_node) == expected + + @fixture + def test_case_with_vars(value_text, variables, expected): + value_node = parse_value(value_text) + assert value_from_ast_untyped(value_node, variables) == expected + + def parses_simple_values(): + test_case('null', None) + test_case('true', True) + test_case('false', False) + test_case('123', 123) + test_case('123.456', 123.456) + test_case('"abc123"', 'abc123') + + def parses_lists_of_values(): + test_case('[true, false]', [True, False]) + test_case('[true, 123.45]', [True, 123.45]) + test_case('[true, null]', [True, None]) + test_case('[true, ["foo", 1.2]]', [True, ['foo', 1.2]]) + + def parses_input_objects(): + test_case('{ int: 123, bool: false }', {'int': 123, 'bool': False}) + test_case('{ foo: [ { bar: "baz"} ] }', {'foo': [{'bar': 'baz'}]}) + + def parses_enum_values_as_plain_strings(): + test_case('TEST_ENUM_VALUE', 'TEST_ENUM_VALUE') + test_case('[TEST_ENUM_VALUE]', ['TEST_ENUM_VALUE']) + + def parses_variables(): + test_case_with_vars('$testVariable', {'testVariable': 'foo'}, 'foo') + test_case_with_vars( + '[$testVariable]', {'testVariable': 'foo'}, ['foo']) + test_case_with_vars( + '{a:[$testVariable]}', {'testVariable': 'foo'}, {'a': ['foo']}) + test_case_with_vars('$testVariable', {'testVariable': None}, None) + test_case_with_vars('$testVariable', {}, INVALID) diff --git a/tests/validation/__init__.py b/tests/validation/__init__.py new file mode 100644 index 00000000..f4280b8e --- /dev/null +++ b/tests/validation/__init__.py @@ -0,0 +1 @@ +"""Tests for graphql.validation""" diff --git a/tests/validation/harness.py b/tests/validation/harness.py new file mode 100644 index 00000000..31167e63 --- /dev/null +++ b/tests/validation/harness.py @@ -0,0 +1,260 @@ +from graphql.language.parser import parse +from graphql.type import ( + GraphQLArgument, GraphQLBoolean, GraphQLEnumType, + GraphQLEnumValue, GraphQLField, GraphQLFloat, + GraphQLID, GraphQLInputField, + GraphQLInputObjectType, GraphQLInt, + GraphQLInterfaceType, GraphQLList, GraphQLNonNull, + GraphQLObjectType, GraphQLSchema, GraphQLString, + GraphQLUnionType, GraphQLScalarType) +from graphql.type.directives import ( + DirectiveLocation, GraphQLDirective, + GraphQLIncludeDirective, + GraphQLSkipDirective) +from graphql.validation import validate + +Being = GraphQLInterfaceType('Being', { + 'name': GraphQLField(GraphQLString, { + 'surname': GraphQLArgument(GraphQLBoolean)})}) + +Pet = 
GraphQLInterfaceType('Pet', { + 'name': GraphQLField(GraphQLString, { + 'surname': GraphQLArgument(GraphQLBoolean)})}) + +Canine = GraphQLInterfaceType('Canine', { + 'name': GraphQLField(GraphQLString, { + 'surname': GraphQLArgument(GraphQLBoolean)})}) + +DogCommand = GraphQLEnumType('DogCommand', { + 'SIT': GraphQLEnumValue(0), + 'HEEL': GraphQLEnumValue(1), + 'DOWN': GraphQLEnumValue(2)}) + +Dog = GraphQLObjectType('Dog', { + 'name': GraphQLField(GraphQLString, { + 'surname': GraphQLArgument(GraphQLBoolean)}), + 'nickname': GraphQLField(GraphQLString), + 'barkVolume': GraphQLField(GraphQLInt), + 'barks': GraphQLField(GraphQLBoolean), + 'doesKnowCommand': GraphQLField(GraphQLBoolean, { + 'dogCommand': GraphQLArgument(DogCommand)}), + 'isHousetrained': GraphQLField( + GraphQLBoolean, + args={'atOtherHomes': GraphQLArgument( + GraphQLBoolean, default_value=True)}), + 'isAtLocation': GraphQLField( + GraphQLBoolean, + args={'x': GraphQLArgument(GraphQLInt), + 'y': GraphQLArgument(GraphQLInt)})}, + interfaces=[Being, Pet, Canine], is_type_of=lambda: True) + +Cat = GraphQLObjectType('Cat', lambda: { + 'furColor': GraphQLField(FurColor), + 'name': GraphQLField(GraphQLString, { + 'surname': GraphQLArgument(GraphQLBoolean)}), + 'nickname': GraphQLField(GraphQLString)}, + interfaces=[Being, Pet], is_type_of=lambda: True) + +CatOrDog = GraphQLUnionType('CatOrDog', [Dog, Cat]) + +Intelligent = GraphQLInterfaceType('Intelligent', { + 'iq': GraphQLField(GraphQLInt)}) + +Human = GraphQLObjectType( + name='Human', + interfaces=[Being, Intelligent], + is_type_of=lambda: True, + fields={ + 'name': GraphQLField(GraphQLString, { + 'surname': GraphQLArgument(GraphQLBoolean)}), + 'pets': GraphQLField(GraphQLList(Pet)), + 'iq': GraphQLField(GraphQLInt)}) + +Alien = GraphQLObjectType( + name='Alien', + is_type_of=lambda: True, + interfaces=[Being, Intelligent], + fields={ + 'iq': GraphQLField(GraphQLInt), + 'name': GraphQLField(GraphQLString, { + 'surname': GraphQLArgument(GraphQLBoolean)}), + 'numEyes': GraphQLField(GraphQLInt)}) + +DogOrHuman = GraphQLUnionType('DogOrHuman', [Dog, Human]) + +HumanOrAlien = GraphQLUnionType('HumanOrAlien', [Human, Alien]) + +FurColor = GraphQLEnumType('FurColor', { + 'BROWN': GraphQLEnumValue(0), + 'BLACK': GraphQLEnumValue(1), + 'TAN': GraphQLEnumValue(2), + 'SPOTTED': GraphQLEnumValue(3), + 'NO_FUR': GraphQLEnumValue(), + 'UNKNOWN': None}) + +ComplexInput = GraphQLInputObjectType('ComplexInput', { + 'requiredField': GraphQLInputField(GraphQLNonNull(GraphQLBoolean)), + 'nonNullField': GraphQLInputField( + GraphQLNonNull(GraphQLBoolean), default_value=False), + 'intField': GraphQLInputField(GraphQLInt), + 'stringField': GraphQLInputField(GraphQLString), + 'booleanField': GraphQLInputField(GraphQLBoolean), + 'stringListField': GraphQLInputField(GraphQLList(GraphQLString))}) + +ComplicatedArgs = GraphQLObjectType('ComplicatedArgs', { + 'intArgField': GraphQLField(GraphQLString, { + 'intArg': GraphQLArgument(GraphQLInt)}), + 'nonNullIntArgField': GraphQLField(GraphQLString, { + 'nonNullIntArg': GraphQLArgument(GraphQLNonNull(GraphQLInt))}), + 'stringArgField': GraphQLField(GraphQLString, { + 'stringArg': GraphQLArgument(GraphQLString)}), + 'booleanArgField': GraphQLField(GraphQLString, { + 'booleanArg': GraphQLArgument(GraphQLBoolean)}), + 'enumArgField': GraphQLField(GraphQLString, { + 'enumArg': GraphQLArgument(FurColor)}), + 'floatArgField': GraphQLField(GraphQLString, { + 'floatArg': GraphQLArgument(GraphQLFloat)}), + 'idArgField': GraphQLField(GraphQLString, { + 'idArg': 
GraphQLArgument(GraphQLID)}), + 'stringListArgField': GraphQLField(GraphQLString, { + 'stringListArg': GraphQLArgument(GraphQLList(GraphQLString))}), + 'stringListNonNullArgField': GraphQLField(GraphQLString, args={ + 'stringListNonNullArg': GraphQLArgument( + GraphQLList(GraphQLNonNull(GraphQLString)))}), + 'complexArgField': GraphQLField(GraphQLString, { + 'complexArg': GraphQLArgument(ComplexInput)}), + 'multipleReqs': GraphQLField(GraphQLString, { + 'req1': GraphQLArgument(GraphQLNonNull(GraphQLInt)), + 'req2': GraphQLArgument(GraphQLNonNull(GraphQLInt))}), + 'nonNullFieldWithDefault': GraphQLField(GraphQLString, { + 'arg': GraphQLArgument(GraphQLNonNull(GraphQLInt), default_value=0)}), + 'multipleOpts': GraphQLField(GraphQLString, { + 'opt1': GraphQLArgument(GraphQLInt, 0), + 'opt2': GraphQLArgument(GraphQLInt, 0)}), + 'multipleOptsAndReq': GraphQLField(GraphQLString, { + 'req1': GraphQLArgument(GraphQLNonNull(GraphQLInt)), + 'req2': GraphQLArgument(GraphQLNonNull(GraphQLInt)), + 'opt1': GraphQLArgument(GraphQLInt, 0), + 'opt2': GraphQLArgument(GraphQLInt, 0)})}) + + +def raise_type_error(message): + raise TypeError(message) + + +InvalidScalar = GraphQLScalarType( + name='Invalid', + serialize=lambda value: value, + parse_literal=lambda node: raise_type_error( + f'Invalid scalar is always invalid: {node.value}'), + parse_value=lambda node: raise_type_error( + f'Invalid scalar is always invalid: {node}')) + +AnyScalar = GraphQLScalarType( + name='Any', + serialize=lambda value: value, + parse_literal=lambda node: node, # Allows any value + parse_value=lambda value: value) # Allows any value + +QueryRoot = GraphQLObjectType('QueryRoot', { + 'human': GraphQLField(Human, { + 'id': GraphQLArgument(GraphQLID), + }), + 'dog': GraphQLField(Dog), + 'pet': GraphQLField(Pet), + 'alien': GraphQLField(Alien), + 'catOrDog': GraphQLField(CatOrDog), + 'humanOrAlien': GraphQLField(HumanOrAlien), + 'complicatedArgs': GraphQLField(ComplicatedArgs), + 'invalidArg': GraphQLField(GraphQLString, args={ + 'arg': GraphQLArgument(InvalidScalar)}), + 'anyArg': GraphQLField(GraphQLString, args={ + 'arg': GraphQLArgument(AnyScalar)})}) + +test_schema = GraphQLSchema( + query=QueryRoot, + directives=[ + GraphQLIncludeDirective, + GraphQLSkipDirective, + GraphQLDirective( + name='onQuery', + locations=[DirectiveLocation.QUERY]), + GraphQLDirective( + name='onMutation', + locations=[DirectiveLocation.MUTATION]), + GraphQLDirective( + name='onSubscription', + locations=[DirectiveLocation.SUBSCRIPTION]), + GraphQLDirective( + name='onField', + locations=[DirectiveLocation.FIELD]), + GraphQLDirective( + name='onFragmentDefinition', + locations=[DirectiveLocation.FRAGMENT_DEFINITION]), + GraphQLDirective( + name='onFragmentSpread', + locations=[DirectiveLocation.FRAGMENT_SPREAD]), + GraphQLDirective( + name='onInlineFragment', + locations=[DirectiveLocation.INLINE_FRAGMENT]), + GraphQLDirective( + name='onSchema', + locations=[DirectiveLocation.SCHEMA]), + GraphQLDirective( + name='onScalar', + locations=[DirectiveLocation.SCALAR]), + GraphQLDirective( + name='onObject', + locations=[DirectiveLocation.OBJECT]), + GraphQLDirective( + name='onFieldDefinition', + locations=[DirectiveLocation.FIELD_DEFINITION]), + GraphQLDirective( + name='onArgumentDefinition', + locations=[DirectiveLocation.ARGUMENT_DEFINITION]), + GraphQLDirective( + name='onInterface', + locations=[DirectiveLocation.INTERFACE]), + GraphQLDirective( + name='onUnion', + locations=[DirectiveLocation.UNION]), + GraphQLDirective( + name='onEnum', 
locations=[DirectiveLocation.ENUM]), + GraphQLDirective( + name='onEnumValue', + locations=[DirectiveLocation.ENUM_VALUE]), + GraphQLDirective( + name='onInputObject', + locations=[DirectiveLocation.INPUT_OBJECT]), + GraphQLDirective( + name='onInputFieldDefinition', + locations=[DirectiveLocation.INPUT_FIELD_DEFINITION])], + types=[Cat, Dog, Human, Alien]) + + +def expect_valid(schema, rules, query_string): + errors = validate(schema, parse(query_string), rules) + assert errors == [], 'Should validate' + + +def expect_invalid(schema, rules, query_string, expected_errors): + errors = validate(schema, parse(query_string), rules) + assert errors, 'Should not validate' + assert errors == expected_errors + return errors + + +def expect_passes_rule(rule, query_string): + return expect_valid(test_schema, [rule], query_string) + + +def expect_fails_rule(rule, query_string, errors): + return expect_invalid(test_schema, [rule], query_string, errors) + + +def expect_fails_rule_with_schema(schema, rule, query_string, errors): + return expect_invalid(schema, [rule], query_string, errors) + + +def expect_passes_rule_with_schema(schema, rule, query_string): + return expect_valid(schema, [rule], query_string) diff --git a/tests/validation/test_executable_definitions.py b/tests/validation/test_executable_definitions.py new file mode 100644 index 00000000..3d495aa8 --- /dev/null +++ b/tests/validation/test_executable_definitions.py @@ -0,0 +1,75 @@ +from graphql.validation import ExecutableDefinitionsRule +from graphql.validation.rules.executable_definitions import ( + non_executable_definitions_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def non_executable_definition( + def_name, line, column): + return { + 'message': non_executable_definitions_message(def_name), + 'locations': [(line, column)]} + + +def describe_validate_executable_definitions(): + + def with_only_operation(): + expect_passes_rule(ExecutableDefinitionsRule, """ + query Foo { + dog { + name + } + } + """) + + def with_operation_and_fragment(): + expect_passes_rule(ExecutableDefinitionsRule, """ + query Foo { + dog { + name + ...Frag + } + } + + fragment Frag on Dog { + name + } + """) + + def with_type_definition(): + expect_fails_rule(ExecutableDefinitionsRule, """ + query Foo { + dog { + name + } + } + + type Cow { + name: String + } + + extend type Dog { + color: String + } + """, [ + non_executable_definition('Cow', 8, 13), + non_executable_definition('Dog', 12, 13) + ]) + + def with_schema_definition(): + expect_fails_rule(ExecutableDefinitionsRule, """ + schema { + query: Query + } + + type Query { + test: String + } + + extend schema @directive + """, [ + non_executable_definition('schema', 2, 13), + non_executable_definition('Query', 6, 13), + non_executable_definition('schema', 10, 13), + ]) diff --git a/tests/validation/test_fields_on_correct_type.py b/tests/validation/test_fields_on_correct_type.py new file mode 100644 index 00000000..70fa124a --- /dev/null +++ b/tests/validation/test_fields_on_correct_type.py @@ -0,0 +1,226 @@ +from graphql.validation import FieldsOnCorrectTypeRule +from graphql.validation.rules.fields_on_correct_type import ( + undefined_field_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def undefined_field( + field, type_, suggested_types, suggested_fields, line, column): + return { + 'message': undefined_field_message( + field, type_, suggested_types, suggested_fields), + 'locations': [(line, column)]} + + +def 
describe_validate_fields_on_correct_type(): + + def object_field_selection(): + expect_passes_rule(FieldsOnCorrectTypeRule, """ + fragment objectFieldSelection on Dog { + __typename + name + } + """) + + def aliased_object_field_selection(): + expect_passes_rule(FieldsOnCorrectTypeRule, """ + fragment aliasedObjectFieldSelection on Dog { + tn : __typename + otherName : name + } + """) + + def interface_field_selection(): + expect_passes_rule(FieldsOnCorrectTypeRule, """ + fragment interfaceFieldSelection on Pet { + __typename + name + } + """) + + def aliased_interface_field_selection(): + expect_passes_rule(FieldsOnCorrectTypeRule, """ + fragment interfaceFieldSelection on Pet { + otherName : name + } + """) + + def lying_alias_selection(): + expect_passes_rule(FieldsOnCorrectTypeRule, """ + fragment lyingAliasSelection on Dog { + name : nickname + } + """) + + def ignores_fields_on_unknown_type(): + expect_passes_rule(FieldsOnCorrectTypeRule, """ + fragment unknownSelection on UnknownType { + unknownField + } + """) + + def reports_errors_when_type_is_known_again(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment typeKnownAgain on Pet { + unknown_pet_field { + ... on Cat { + unknown_cat_field + } + } + }, + """, [ + undefined_field('unknown_pet_field', 'Pet', [], [], 3, 15), + undefined_field('unknown_cat_field', 'Cat', [], [], 5, 19) + ]) + + def field_not_defined_on_fragment(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment fieldNotDefined on Dog { + meowVolume + } + """, [ + undefined_field('meowVolume', 'Dog', [], ['barkVolume'], 3, 15) + ]) + + def ignores_deeply_unknown_field(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment deepFieldNotDefined on Dog { + unknown_field { + deeper_unknown_field + } + } + """, [ + undefined_field('unknown_field', 'Dog', [], [], 3, 15) + ]) + + def sub_field_not_defined(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment subFieldNotDefined on Human { + pets { + unknown_field + } + } + """, [ + undefined_field('unknown_field', 'Pet', [], [], 4, 17) + ]) + + def field_not_defined_on_inline_fragment(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment fieldNotDefined on Pet { + ... 
on Dog { + meowVolume + } + } + """, [ + undefined_field('meowVolume', 'Dog', [], ['barkVolume'], 4, 17) + ]) + + def aliased_field_target_not_defined(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment aliasedFieldTargetNotDefined on Dog { + volume : mooVolume + } + """, [ + undefined_field('mooVolume', 'Dog', [], ['barkVolume'], 3, 15) + ]) + + def aliased_lying_field_target_not_defined(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment aliasedLyingFieldTargetNotDefined on Dog { + barkVolume : kawVolume + } + """, [ + undefined_field('kawVolume', 'Dog', [], ['barkVolume'], 3, 15) + ]) + + def not_defined_on_interface(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment notDefinedOnInterface on Pet { + tailLength + } + """, [ + undefined_field('tailLength', 'Pet', [], [], 3, 15) + ]) + + def defined_on_implementors_but_not_on_interface(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment definedOnImplementorsButNotInterface on Pet { + nickname + } + """, [ + undefined_field('nickname', 'Pet', ['Dog', 'Cat'], ['name'], 3, 15) + ]) + + def meta_field_selection_on_union(): + expect_passes_rule(FieldsOnCorrectTypeRule, """ + fragment directFieldSelectionOnUnion on CatOrDog { + __typename + } + """) + + def direct_field_selection_on_union(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment directFieldSelectionOnUnion on CatOrDog { + directField + } + """, [ + undefined_field('directField', 'CatOrDog', [], [], 3, 15) + ]) + + def defined_on_implementors_queried_on_union(): + expect_fails_rule(FieldsOnCorrectTypeRule, """ + fragment definedOnImplementorsQueriedOnUnion on CatOrDog { + name + } + """, [ + undefined_field( + 'name', 'CatOrDog', + ['Being', 'Pet', 'Canine', 'Dog', 'Cat'], [], 3, 15) + ]) + + def valid_field_in_inline_fragment(): + expect_passes_rule(FieldsOnCorrectTypeRule, """ + fragment objectFieldSelection on Pet { + ... on Dog { + name + } + ... { + name + } + } + """) + + +def describe_fields_on_correct_type_error_message(): + + def fields_correct_type_no_suggestion(): + assert undefined_field_message( + 'f', 'T', [], []) == "Cannot query field 'f' on type 'T'." + + def works_with_no_small_numbers_of_type_suggestion(): + assert undefined_field_message('f', 'T', ['A', 'B'], []) == ( + "Cannot query field 'f' on type 'T'." + " Did you mean to use an inline fragment on 'A' or 'B'?") + + def works_with_no_small_numbers_of_field_suggestion(): + assert undefined_field_message('f', 'T', [], ['z', 'y']) == ( + "Cannot query field 'f' on type 'T'." + " Did you mean 'z' or 'y'?") + + def only_shows_one_set_of_suggestions_at_a_time_preferring_types(): + assert undefined_field_message('f', 'T', ['A', 'B'], ['z', 'y']) == ( + "Cannot query field 'f' on type 'T'." + " Did you mean to use an inline fragment on 'A' or 'B'?") + + def limits_lots_of_type_suggestions(): + assert undefined_field_message( + 'f', 'T', ['A', 'B', 'C', 'D', 'E', 'F'], []) == ( + "Cannot query field 'f' on type 'T'. Did you mean to use" + " an inline fragment on 'A', 'B', 'C', 'D' or 'E'?") + + def limits_lots_of_field_suggestions(): + assert undefined_field_message( + 'f', 'T', [], ['z', 'y', 'x', 'w', 'v', 'u']) == ( + "Cannot query field 'f' on type 'T'." 
+ " Did you mean 'z', 'y', 'x', 'w' or 'v'?") diff --git a/tests/validation/test_fragments_on_composite_types.py b/tests/validation/test_fragments_on_composite_types.py new file mode 100644 index 00000000..5a3ff6e0 --- /dev/null +++ b/tests/validation/test_fragments_on_composite_types.py @@ -0,0 +1,95 @@ +from graphql.type import GraphQLString +from graphql.validation import FragmentsOnCompositeTypesRule +from graphql.validation.rules.fragments_on_composite_types import ( + fragment_on_non_composite_error_message, + inline_fragment_on_non_composite_error_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def error(frag_name, type_name, line, column): + return { + 'message': fragment_on_non_composite_error_message( + frag_name, type_name), + 'locations': [(line, column)]} + + +def describe_validate_fragments_on_composite_types(): + + def object_is_valid_fragment_type(): + expect_passes_rule(FragmentsOnCompositeTypesRule, """ + fragment validFragment on Dog { + barks + } + """) + + def interface_is_valid_fragment_type(): + expect_passes_rule(FragmentsOnCompositeTypesRule, """ + fragment validFragment on Pet { + name + } + """) + + def object_is_valid_inline_fragment_type(): + expect_passes_rule(FragmentsOnCompositeTypesRule, """ + fragment validFragment on Pet { + ... on Dog { + barks + } + } + """) + + def inline_fragment_without_type_is_valid(): + expect_passes_rule(FragmentsOnCompositeTypesRule, """ + fragment validFragment on Pet { + ... { + name + } + } + """) + + def union_is_valid_fragment_type(): + expect_passes_rule(FragmentsOnCompositeTypesRule, """ + fragment validFragment on CatOrDog { + __typename + } + """) + + def scalar_is_invalid_fragment_type(): + expect_fails_rule(FragmentsOnCompositeTypesRule, """ + fragment scalarFragment on Boolean { + bad + } + """, [ + error('scalarFragment', 'Boolean', 2, 40) + ]) + + def enum_is_invalid_fragment_type(): + expect_fails_rule(FragmentsOnCompositeTypesRule, """ + fragment scalarFragment on FurColor { + bad + } + """, [ + error('scalarFragment', 'FurColor', 2, 40) + ]) + + def input_object_is_invalid_fragment_type(): + expect_fails_rule(FragmentsOnCompositeTypesRule, """ + fragment inputFragment on ComplexInput { + stringField + } + """, [ + error('inputFragment', 'ComplexInput', 2, 39) + ]) + + def scalar_is_invalid_inline_fragment_type(): + expect_fails_rule(FragmentsOnCompositeTypesRule, """ + fragment invalidFragment on Pet { + ... 
on String { + barks + } + } + """, [{ + 'message': inline_fragment_on_non_composite_error_message( + GraphQLString), 'locations': [(3, 22)] + }]) diff --git a/tests/validation/test_known_argument_names.py b/tests/validation/test_known_argument_names.py new file mode 100644 index 00000000..277a365a --- /dev/null +++ b/tests/validation/test_known_argument_names.py @@ -0,0 +1,146 @@ +from graphql.validation import KnownArgumentNamesRule +from graphql.validation.rules.known_argument_names import ( + unknown_arg_message, unknown_directive_arg_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def unknown_arg(arg_name, field_name, type_name, suggested_args, line, column): + return { + 'message': unknown_arg_message( + arg_name, field_name, type_name, suggested_args), + 'locations': [(line, column)]} + + +def unknown_directive_arg( + arg_name, directive_name, suggested_args, line, column): + return { + 'message': unknown_directive_arg_message( + arg_name, directive_name, suggested_args), + 'locations': [(line, column)]} + + +def describe_validate_known_argument_names(): + + def single_arg_is_known(): + expect_passes_rule(KnownArgumentNamesRule, """ + fragment argOnRequiredArg on Dog { + doesKnowCommand(dogCommand: SIT) + } + """) + + def multiple_args_are_known(): + expect_passes_rule(KnownArgumentNamesRule, """ + fragment multipleArgs on ComplicatedArgs { + multipleReqs(req1: 1, req2: 2) + } + """) + + def ignore_args_of_unknown_fields(): + expect_passes_rule(KnownArgumentNamesRule, """ + fragment argOnUnknownField on Dog { + unknownField(unknownArg: SIT) + } + """) + + def multiple_args_in_reverse_order_are_known(): + expect_passes_rule(KnownArgumentNamesRule, """ + fragment multipleArgsReverseOrder on ComplicatedArgs { + multipleReqs(req2: 2, req1: 1) + } + """) + + def no_args_on_optional_arg(): + expect_passes_rule(KnownArgumentNamesRule, """ + fragment noArgOnOptionalArg on Dog { + isHousetrained + } + """) + + def args_are_known_deeply(): + expect_passes_rule(KnownArgumentNamesRule, """ + { + dog { + doesKnowCommand(dogCommand: SIT) + } + human { + pet { + ... 
on Dog { + doesKnowCommand(dogCommand: SIT) + } + } + } + } + """) + + def directive_args_are_known(): + expect_passes_rule(KnownArgumentNamesRule, """ + { + dog @skip(if: true) + } + """) + + def undirective_args_are_invalid(): + expect_fails_rule(KnownArgumentNamesRule, """ + { + dog @skip(unless: true) + } + """, [ + unknown_directive_arg('unless', 'skip', [], 3, 25) + ]) + + def misspelled_directive_args_are_reported(): + expect_fails_rule(KnownArgumentNamesRule, """ + { + dog @skip(iff: true) + } + """, [ + unknown_directive_arg('iff', 'skip', ['if'], 3, 25) + ]) + + def invalid_arg_name(): + expect_fails_rule(KnownArgumentNamesRule, """ + fragment invalidArgName on Dog { + doesKnowCommand(unknown: true) + } + """, [ + unknown_arg('unknown', 'doesKnowCommand', 'Dog', [], 3, 31) + ]) + + def misspelled_args_name_is_reported(): + expect_fails_rule(KnownArgumentNamesRule, """ + fragment invalidArgName on Dog { + doesKnowCommand(dogcommand: true) + } + """, [unknown_arg( + 'dogcommand', 'doesKnowCommand', 'Dog', ['dogCommand'], 3, 31) + ]) + + def unknown_args_amongst_known_args(): + expect_fails_rule(KnownArgumentNamesRule, """ + fragment oneGoodArgOneInvalidArg on Dog { + doesKnowCommand(whoknows: 1, dogCommand: SIT, unknown: true) + } + """, [ + unknown_arg('whoknows', 'doesKnowCommand', 'Dog', [], 3, 31), + unknown_arg('unknown', 'doesKnowCommand', 'Dog', [], 3, 61) + ]) + + def unknown_args_deeply(): + expect_fails_rule(KnownArgumentNamesRule, """ + { + dog { + doesKnowCommand(unknown: true) + } + human { + pet { + ... on Dog { + doesKnowCommand(unknown: true) + } + } + } + } + """, [ + unknown_arg('unknown', 'doesKnowCommand', 'Dog', [], 4, 33), + unknown_arg('unknown', 'doesKnowCommand', 'Dog', [], 9, 37) + ]) diff --git a/tests/validation/test_known_directives.py b/tests/validation/test_known_directives.py new file mode 100644 index 00000000..4b3f733b --- /dev/null +++ b/tests/validation/test_known_directives.py @@ -0,0 +1,199 @@ +from graphql.validation import KnownDirectivesRule +from graphql.validation.rules.known_directives import ( + unknown_directive_message, misplaced_directive_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def unknown_directive(directive_name, line, column): + return { + 'message': unknown_directive_message(directive_name), + 'locations': [(line, column)]} + + +def misplaced_directive(directive_name, placement, line, column): + return { + 'message': misplaced_directive_message(directive_name, placement), + 'locations': [(line, column)]} + + +def describe_known_directives(): + + def with_no_directives(): + expect_passes_rule(KnownDirectivesRule, """ + query Foo { + name + ...Frag + } + + fragment Frag on Dog { + name + } + """) + + def with_known_directives(): + expect_passes_rule(KnownDirectivesRule, """ + { + dog @include(if: true) { + name + } + human @skip(if: false) { + name + } + } + """) + + def with_unknown_directive(): + expect_fails_rule(KnownDirectivesRule, """ + { + dog @unknown(directive: "value") { + name + } + } + """, [ + unknown_directive('unknown', 3, 19) + ]) + + def with_many_unknown_directives(): + expect_fails_rule(KnownDirectivesRule, """ + { + dog @unknown(directive: "value") { + name + } + human @unknown(directive: "value") { + name + pets @unknown(directive: "value") { + name + } + } + } + """, [ + unknown_directive('unknown', 3, 19), + unknown_directive('unknown', 6, 21), + unknown_directive('unknown', 8, 22) + ]) + + def with_well_placed_directives(): + expect_passes_rule(KnownDirectivesRule, """ + 
query Foo @onQuery{ + name @include(if: true) + ...Frag @include(if: true) + skippedField @skip(if: true) + ...SkippedFrag @skip(if: true) + } + + mutation Bar @onMutation { + someField + } + """) + + def with_misplaced_directives(): + expect_fails_rule(KnownDirectivesRule, """ + query Foo @include(if: true) { + name @onQuery + ...Frag @onQuery + } + + mutation Bar @onQuery { + someField + } + """, [ + misplaced_directive('include', 'query', 2, 23), + misplaced_directive('onQuery', 'field', 3, 20), + misplaced_directive('onQuery', 'fragment spread', 4, 23), + misplaced_directive('onQuery', 'mutation', 7, 26), + ]) + + def describe_within_schema_language(): + + # noinspection PyShadowingNames + def with_well_placed_directives(): + expect_passes_rule(KnownDirectivesRule, """ + type MyObj implements MyInterface @onObject { + myField(myArg: Int @onArgumentDefinition): String @onFieldDefinition + } + + extend type MyObj @onObject + + scalar MyScalar @onScalar + + extend scalar MyScalar @onScalar + + interface MyInterface @onInterface { + myField(myArg: Int @onArgumentDefinition): String @onFieldDefinition + } + + extend interface MyInterface @onInterface + + union MyUnion @onUnion = MyObj | Other + + extend union MyUnion @onUnion + + enum MyEnum @onEnum { + MY_VALUE @onEnumValue + } + + extend enum MyEnum @onEnum + + input MyInput @onInputObject { + myField: Int @onInputFieldDefinition + } + + extend input MyInput @onInputObject + + schema @onSchema { + query: MyQuery + } + + extend schema @onSchema + """) # noqa + + # noinspection PyShadowingNames + def with_misplaced_directives(): + expect_fails_rule(KnownDirectivesRule, """ + type MyObj implements MyInterface @onInterface { + myField(myArg: Int @onInputFieldDefinition): String @onInputFieldDefinition + } + + scalar MyScalar @onEnum + + interface MyInterface @onObject { + myField(myArg: Int @onInputFieldDefinition): String @onInputFieldDefinition + } + + union MyUnion @onEnumValue = MyObj | Other + + enum MyEnum @onScalar { + MY_VALUE @onUnion + } + + input MyInput @onEnum { + myField: Int @onArgumentDefinition + } + + schema @onObject { + query: MyQuery + } + + extend schema @onObject + """, [ # noqa + misplaced_directive('onInterface', 'object', 2, 51), + misplaced_directive( + 'onInputFieldDefinition', 'argument definition', 3, 38), + misplaced_directive( + 'onInputFieldDefinition', 'field definition', 3, 71), + misplaced_directive('onEnum', 'scalar', 6, 33), + misplaced_directive('onObject', 'interface', 8, 39), + misplaced_directive( + 'onInputFieldDefinition', 'argument definition', 9, 38), + misplaced_directive( + 'onInputFieldDefinition', 'field definition', 9, 71), + misplaced_directive('onEnumValue', 'union', 12, 31), + misplaced_directive('onScalar', 'enum', 14, 29), + misplaced_directive('onUnion', 'enum value', 15, 28), + misplaced_directive('onEnum', 'input object', 18, 31), + misplaced_directive( + 'onArgumentDefinition', 'input field definition', 19, 32), + misplaced_directive('onObject', 'schema', 22, 24), + misplaced_directive('onObject', 'schema', 26, 31) + ]) diff --git a/tests/validation/test_known_fragment_names.py b/tests/validation/test_known_fragment_names.py new file mode 100644 index 00000000..d8dd512a --- /dev/null +++ b/tests/validation/test_known_fragment_names.py @@ -0,0 +1,59 @@ +from graphql.validation import KnownFragmentNamesRule +from graphql.validation.rules.known_fragment_names import ( + unknown_fragment_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def 
undef_fragment(fragment_name, line, column): + return { + 'message': unknown_fragment_message(fragment_name), + 'locations': [(line, column)]} + + +def describe_validate_known_fragment_names(): + + def known_fragment_names_are_valid(): + expect_passes_rule(KnownFragmentNamesRule, """ + { + human(id: 4) { + ...HumanFields1 + ... on Human { + ...HumanFields2 + } + ... { + name + } + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + """) + + def unknown_fragment_names_are_invalid(): + expect_fails_rule(KnownFragmentNamesRule, """ + { + human(id: 4) { + ...UnknownFragment1 + ... on Human { + ...UnknownFragment2 + } + } + } + fragment HumanFields on Human { + name + ...UnknownFragment3 + } + """, [ + undef_fragment('UnknownFragment1', 4, 20), + undef_fragment('UnknownFragment2', 6, 22), + undef_fragment('UnknownFragment3', 12, 18), + ]) diff --git a/tests/validation/test_known_type_names.py b/tests/validation/test_known_type_names.py new file mode 100644 index 00000000..6b327a1b --- /dev/null +++ b/tests/validation/test_known_type_names.py @@ -0,0 +1,63 @@ +from graphql.validation import KnownTypeNamesRule +from graphql.validation.rules.known_type_names import unknown_type_message + +from .harness import expect_fails_rule, expect_passes_rule + + +def unknown_type(type_name, suggested_types, line, column): + return { + 'message': unknown_type_message(type_name, suggested_types), + 'locations': [(line, column)]} + + +def describe_validate_known_type_names(): + + def known_type_names_are_valid(): + expect_passes_rule(KnownTypeNamesRule, """ + query Foo($var: String, $required: [String!]!) { + user(id: 4) { + pets { ... on Pet { name }, ...PetFields, ... { name } } + } + } + fragment PetFields on Pet { + name + } + """) + + def unknown_type_names_are_invalid(): + expect_fails_rule(KnownTypeNamesRule, """ + query Foo($var: JumbledUpLetters) { + user(id: 4) { + name + pets { ... on Badger { name }, ...PetFields, ... 
{ name } } + } + } + fragment PetFields on Peettt { + name + } + """, [ + unknown_type('JumbledUpLetters', [], 2, 29), + unknown_type('Badger', [], 5, 31), + unknown_type('Peettt', ['Pet'], 8, 35), + ]) + + def ignores_type_definitions(): + expect_fails_rule(KnownTypeNamesRule, """ + type NotInTheSchema { + field: FooBar + } + interface FooBar { + field: NotInTheSchema + } + union U = A | B + input Blob { + field: UnknownType + } + query Foo($var: NotInTheSchema) { + user(id: $var) { + id + } + } + """, [ + unknown_type('NotInTheSchema', [], 12, 29), + ]) diff --git a/tests/validation/test_lone_anonymous_operation.py b/tests/validation/test_lone_anonymous_operation.py new file mode 100644 index 00000000..3d3377ee --- /dev/null +++ b/tests/validation/test_lone_anonymous_operation.py @@ -0,0 +1,86 @@ +from graphql.validation import LoneAnonymousOperationRule +from graphql.validation.rules.lone_anonymous_operation import ( + anonymous_operation_not_alone_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def anon_not_alone(line, column): + return { + 'message': anonymous_operation_not_alone_message(), + 'locations': [(line, column)]} + + +def describe_validate_anonymous_operation_must_be_alone(): + + def no_operations(): + expect_passes_rule(LoneAnonymousOperationRule, """ + fragment fragA on Type { + field + } + """) + + def one_anon_operation(): + expect_passes_rule(LoneAnonymousOperationRule, """ + { + field + } + """) + + def multiple_named_operation(): + expect_passes_rule(LoneAnonymousOperationRule, """ + query Foo { + field + } + + query Bar { + field + } + """) + + def anon_operation_with_fragment(): + expect_passes_rule(LoneAnonymousOperationRule, """ + { + ...Foo + } + fragment Foo on Type { + field + } + """) + + def multiple_anon_operations(): + expect_fails_rule(LoneAnonymousOperationRule, """ + { + fieldA + } + { + fieldB + } + """, [ + anon_not_alone(2, 13), + anon_not_alone(5, 13), + ]) + + def anon_operation_with_a_mutation(): + expect_fails_rule(LoneAnonymousOperationRule, """ + { + fieldA + } + mutation Foo { + fieldB + } + """, [ + anon_not_alone(2, 13) + ]) + + def anon_operation_with_a_subscription(): + expect_fails_rule(LoneAnonymousOperationRule, """ + { + fieldA + } + subscription Foo { + fieldB + } + """, [ + anon_not_alone(2, 13) + ]) diff --git a/tests/validation/test_no_fragment_cycles.py b/tests/validation/test_no_fragment_cycles.py new file mode 100644 index 00000000..e8d16c2d --- /dev/null +++ b/tests/validation/test_no_fragment_cycles.py @@ -0,0 +1,172 @@ +from graphql.validation import NoFragmentCyclesRule +from graphql.validation.rules.no_fragment_cycles import cycle_error_message + +from .harness import expect_fails_rule, expect_passes_rule + + +def describe_validate_no_circular_fragment_spreads(): + + def single_reference_is_valid(): + expect_passes_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragB } + fragment fragB on Dog { name } + """) + + def spreading_twice_is_not_circular(): + expect_passes_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragB, ...fragB } + fragment fragB on Dog { name } + """) + + def spreading_twice_indirectly_is_not_circular(): + expect_passes_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragB, ...fragC } + fragment fragB on Dog { ...fragC } + fragment fragC on Dog { name } + """) + + def double_spread_within_abstract_types(): + expect_passes_rule(NoFragmentCyclesRule, """ + fragment nameFragment on Pet { + ... on Dog { name } + ... 
on Cat { name } + } + fragment spreadsInAnon on Pet { + ... on Dog { ...nameFragment } + ... on Cat { ...nameFragment } + } + """) + + def does_not_raise_false_positive_on_unknown_fragment(): + expect_passes_rule(NoFragmentCyclesRule, """ + fragment nameFragment on Pet { + ...UnknownFragment + } + """) + + def spreading_recursively_within_field_fails(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Human { relatives { ...fragA } }, + """, [{ + 'message': cycle_error_message('fragA', []), + 'locations': [(2, 51)] + }]) + + def no_spreading_itself_directly(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragA } + """, [{ + 'message': cycle_error_message('fragA', []), + 'locations': [(2, 37)] + }]) + + def no_spreading_itself_directly_within_inline_fragment(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Pet { + ... on Dog { + ...fragA + } + } + """, [{ + 'message': cycle_error_message('fragA', []), + 'locations': [(4, 17)] + }]) + + def no_spreading_itself_indirectly(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragB } + fragment fragB on Dog { ...fragA } + """, [{ + 'message': cycle_error_message('fragA', ['fragB']), + 'locations': [(2, 37), (3, 37)] + }]) + + def no_spreading_itself_indirectly_reports_opposite_order(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragB on Dog { ...fragA } + fragment fragA on Dog { ...fragB } + """, [{ + 'message': cycle_error_message('fragB', ['fragA']), + 'locations': [(2, 37), (3, 37)] + }]) + + def no_spreading_itself_indirectly_within_inline_fragment(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Pet { + ... on Dog { + ...fragB + } + } + fragment fragB on Pet { + ... on Dog { + ...fragA + } + } + """, [{ + 'message': cycle_error_message('fragA', ['fragB']), + 'locations': [(4, 17), (9, 17)] + }]) + + def no_spreading_itself_deeply(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragB } + fragment fragB on Dog { ...fragC } + fragment fragC on Dog { ...fragO } + fragment fragX on Dog { ...fragY } + fragment fragY on Dog { ...fragZ } + fragment fragZ on Dog { ...fragO } + fragment fragO on Dog { ...fragP } + fragment fragP on Dog { ...fragA, ...fragX } + """, [{ + 'message': cycle_error_message( + 'fragA', ['fragB', 'fragC', 'fragO', 'fragP']), + 'locations': [(2, 37), (3, 37), (4, 37), (8, 37), (9, 37)], + 'path': None + }, { + 'message': cycle_error_message( + 'fragO', ['fragP', 'fragX', 'fragY', 'fragZ']), + 'locations': [(8, 37), (9, 47), (5, 37), (6, 37), (7, 37)], + 'path': None + }]) + + def no_spreading_itself_deeply_two_paths(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragB, ...fragC } + fragment fragB on Dog { ...fragA } + fragment fragC on Dog { ...fragA } + """, [{ + 'message': cycle_error_message('fragA', ['fragB']), + 'locations': [(2, 37), (3, 37)] + }, { + 'message': cycle_error_message('fragA', ['fragC']), + 'locations': [(2, 47), (4, 37)] + }]) + + def no_spreading_itself_deeply_two_paths_alt_traverse_order(): + expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragC } + fragment fragB on Dog { ...fragC } + fragment fragC on Dog { ...fragA, ...fragB } + """, [{ + 'message': cycle_error_message('fragA', ['fragC']), + 'locations': [(2, 37), (4, 37)] + }, { + 'message': cycle_error_message('fragC', ['fragB']), + 'locations': [(4, 47), (3, 37)] + }]) + + def no_spreading_itself_deeply_and_immediately(): + 
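+ # Three cycles are reported below: fragB spreading itself directly, and the cycles through fragC starting from fragA and from fragB.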
expect_fails_rule(NoFragmentCyclesRule, """ + fragment fragA on Dog { ...fragB } + fragment fragB on Dog { ...fragB, ...fragC } + fragment fragC on Dog { ...fragA, ...fragB } + """, [{ + 'message': cycle_error_message('fragB', []), + 'locations': [(3, 37)] + }, { + 'message': cycle_error_message('fragA', ['fragB', 'fragC']), + 'locations': [(2, 37), (3, 47), (4, 37)] + }, { + 'message': cycle_error_message('fragB', ['fragC']), + 'locations': [(3, 47), (4, 47)] + }]) diff --git a/tests/validation/test_no_undefined_variables.py b/tests/validation/test_no_undefined_variables.py new file mode 100644 index 00000000..b688994b --- /dev/null +++ b/tests/validation/test_no_undefined_variables.py @@ -0,0 +1,269 @@ +from graphql.validation import NoUndefinedVariablesRule +from graphql.validation.rules.no_undefined_variables import ( + undefined_var_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def undef_var(var_name, l1, c1, op_name, l2, c2): + return { + 'message': undefined_var_message(var_name, op_name), + 'locations': [(l1, c1), (l2, c2)]} + + +def describe_validate_no_undefined_variables(): + + def all_variables_defined(): + expect_passes_rule(NoUndefinedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + field(a: $a, b: $b, c: $c) + } + """) + + def all_variables_deeply_defined(): + expect_passes_rule(NoUndefinedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + field(a: $a) { + field(b: $b) { + field(c: $c) + } + } + } + """) + + def all_variables_deeply_in_inline_fragments_defined(): + expect_passes_rule(NoUndefinedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + ... on Type { + field(a: $a) { + field(b: $b) { + ... on Type { + field(c: $c) + } + } + } + } + } + """) + + def all_variables_in_fragments_deeply_defined(): + expect_passes_rule(NoUndefinedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field(c: $c) + } + """) + + def variable_within_single_fragment_defined_in_multiple_operations(): + expect_passes_rule(NoUndefinedVariablesRule, """ + query Foo($a: String) { + ...FragA + } + query Bar($a: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) + } + """) + + def variable_within_fragments_defined_in_operations(): + expect_passes_rule(NoUndefinedVariablesRule, """ + query Foo($a: String) { + ...FragA + } + query Bar($b: String) { + ...FragB + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + """) + + def variable_within_recursive_fragment_defined(): + expect_passes_rule(NoUndefinedVariablesRule, """ + query Foo($a: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragA + } + } + """) + + def variable_not_defined(): + expect_fails_rule(NoUndefinedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + field(a: $a, b: $b, c: $c, d: $d) + } + """, [ + undef_var('d', 3, 45, 'Foo', 2, 13) + ]) + + def variable_not_defined_by_unnamed_query(): + expect_fails_rule(NoUndefinedVariablesRule, """ + { + field(a: $a) + } + """, [ + undef_var('a', 3, 24, '', 2, 13) + ]) + + def multiple_variables_not_defined(): + expect_fails_rule(NoUndefinedVariablesRule, """ + query Foo($b: String) { + field(a: $a, b: $b, c: $c) + } + """, [ + undef_var('a', 3, 24, 'Foo', 2, 13), + undef_var('c', 3, 38, 'Foo', 2, 13) + ]) + + def 
variable_in_fragment_not_defined_by_unnamed_query(): + expect_fails_rule(NoUndefinedVariablesRule, """ + { + ...FragA + } + fragment FragA on Type { + field(a: $a) + } + """, [ + undef_var('a', 6, 24, '', 2, 13) + ]) + + def variable_in_fragment_not_defined_by_operation(): + expect_fails_rule(NoUndefinedVariablesRule, """ + query Foo($a: String, $b: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field(c: $c) + } + """, [ + undef_var('c', 16, 24, 'Foo', 2, 13) + ]) + + def multiple_variables_in_fragments_not_defined(): + expect_fails_rule(NoUndefinedVariablesRule, """ + query Foo($b: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field(c: $c) + } + """, [ + undef_var('a', 6, 24, 'Foo', 2, 13), + undef_var('c', 16, 24, 'Foo', 2, 13) + ]) + + def single_variable_in_fragment_not_defined_by_multiple_operations(): + expect_fails_rule(NoUndefinedVariablesRule, """ + query Foo($a: String) { + ...FragAB + } + query Bar($a: String) { + ...FragAB + } + fragment FragAB on Type { + field(a: $a, b: $b) + } + """, [ + undef_var('b', 9, 31, 'Foo', 2, 13), + undef_var('b', 9, 31, 'Bar', 5, 13) + ]) + + def variables_in_fragment_not_defined_by_multiple_operations(): + expect_fails_rule(NoUndefinedVariablesRule, """ + query Foo($b: String) { + ...FragAB + } + query Bar($a: String) { + ...FragAB + } + fragment FragAB on Type { + field(a: $a, b: $b) + } + """, [ + undef_var('a', 9, 24, 'Foo', 2, 13), + undef_var('b', 9, 31, 'Bar', 5, 13) + ]) + + def variable_in_fragment_used_by_other_operation(): + expect_fails_rule(NoUndefinedVariablesRule, """ + query Foo($b: String) { + ...FragA + } + query Bar($a: String) { + ...FragB + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + """, [ + undef_var('a', 9, 24, 'Foo', 2, 13), + undef_var('b', 12, 24, 'Bar', 5, 13) + ]) + + def multiple_undefined_variables_produce_multiple_errors(): + expect_fails_rule(NoUndefinedVariablesRule, """ + query Foo($b: String) { + ...FragAB + } + query Bar($a: String) { + ...FragAB + } + fragment FragAB on Type { + field1(a: $a, b: $b) + ...FragC + field3(a: $a, b: $b) + } + fragment FragC on Type { + field2(c: $c) + } + """, [ + undef_var('a', 9, 25, 'Foo', 2, 13), + undef_var('a', 11, 25, 'Foo', 2, 13), + undef_var('c', 14, 25, 'Foo', 2, 13), + undef_var('b', 9, 32, 'Bar', 5, 13), + undef_var('b', 11, 32, 'Bar', 5, 13), + undef_var('c', 14, 25, 'Bar', 5, 13), + ]) diff --git a/tests/validation/test_no_unused_fragments.py b/tests/validation/test_no_unused_fragments.py new file mode 100644 index 00000000..255028aa --- /dev/null +++ b/tests/validation/test_no_unused_fragments.py @@ -0,0 +1,142 @@ +from graphql.validation import NoUnusedFragmentsRule +from graphql.validation.rules.no_unused_fragments import ( + unused_fragment_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def unused_frag(frag_name, line, column): + return { + 'message': unused_fragment_message(frag_name), + 'locations': [(line, column)]} + + +def describe_validate_no_unused_fragments(): + + def all_fragment_names_are_used(): + expect_passes_rule(NoUnusedFragmentsRule, """ + { + human(id: 4) { + ...HumanFields1 + ... 
on Human { + ...HumanFields2 + } + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + """) + + def all_fragment_names_are_used_by_multiple_operations(): + expect_passes_rule(NoUnusedFragmentsRule, """ + query Foo { + human(id: 4) { + ...HumanFields1 + } + } + query Bar { + human(id: 4) { + ...HumanFields2 + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + """) + + def contains_unknown_fragments(): + expect_fails_rule(NoUnusedFragmentsRule, """ + query Foo { + human(id: 4) { + ...HumanFields1 + } + } + query Bar { + human(id: 4) { + ...HumanFields2 + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + fragment Unused1 on Human { + name + } + fragment Unused2 on Human { + name + } + """, [ + unused_frag('Unused1', 22, 13), + unused_frag('Unused2', 25, 13), + ]) + + def contains_unknown_fragments_with_ref_cycle(): + expect_fails_rule(NoUnusedFragmentsRule, """ + query Foo { + human(id: 4) { + ...HumanFields1 + } + } + query Bar { + human(id: 4) { + ...HumanFields2 + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + fragment Unused1 on Human { + name + ...Unused2 + } + fragment Unused2 on Human { + name + ...Unused1 + } + """, [ + unused_frag('Unused1', 22, 13), + unused_frag('Unused2', 26, 13), + ]) + + def contains_unknown_and_undefined_fragments(): + expect_fails_rule(NoUnusedFragmentsRule, """ + query Foo { + human(id: 4) { + ...bar + } + } + fragment foo on Human { + name + } + """, [ + unused_frag('foo', 7, 13) + ]) diff --git a/tests/validation/test_no_unused_variables.py b/tests/validation/test_no_unused_variables.py new file mode 100644 index 00000000..0654f29b --- /dev/null +++ b/tests/validation/test_no_unused_variables.py @@ -0,0 +1,193 @@ +from graphql.validation import NoUnusedVariablesRule +from graphql.validation.rules.no_unused_variables import ( + unused_variable_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def unused_var(var_name, op_name, line, column): + return { + 'message': unused_variable_message(var_name, op_name), + 'locations': [(line, column)]} + + +def describe_validate_no_unused_variables(): + + def uses_all_variables(): + expect_passes_rule(NoUnusedVariablesRule, """ + query ($a: String, $b: String, $c: String) { + field(a: $a, b: $b, c: $c) + } + """) + + def uses_all_variables_deeply(): + expect_passes_rule(NoUnusedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + field(a: $a) { + field(b: $b) { + field(c: $c) + } + } + } + """) + + def uses_all_variables_deeply_in_inline_fragments(): + expect_passes_rule(NoUnusedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + ... on Type { + field(a: $a) { + field(b: $b) { + ... 
on Type { + field(c: $c) + } + } + } + } + } + """) + + def uses_all_variables_in_fragment(): + expect_passes_rule(NoUnusedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field(c: $c) + } + """) + + def variable_used_by_fragment_in_multiple_operations(): + expect_passes_rule(NoUnusedVariablesRule, """ + query Foo($a: String) { + ...FragA + } + query Bar($b: String) { + ...FragB + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + """) + + def variable_used_by_recursive_fragment(): + expect_passes_rule(NoUnusedVariablesRule, """ + query Foo($a: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragA + } + } + """) + + def variable_not_used(): + expect_fails_rule(NoUnusedVariablesRule, """ + query ($a: String, $b: String, $c: String) { + field(a: $a, b: $b) + } + """, [ + unused_var('c', None, 2, 44) + ]) + + def multiple_variables_not_used(): + expect_fails_rule(NoUnusedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + field(b: $b) + } + """, [ + unused_var('a', 'Foo', 2, 23), + unused_var('c', 'Foo', 2, 47) + ]) + + def variable_not_used_in_fragments(): + expect_fails_rule(NoUnusedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field + } + """, [ + unused_var('c', 'Foo', 2, 47) + ]) + + def multiple_variables_not_used_in_fragments(): + expect_fails_rule(NoUnusedVariablesRule, """ + query Foo($a: String, $b: String, $c: String) { + ...FragA + } + fragment FragA on Type { + field { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field + } + """, [ + unused_var('a', 'Foo', 2, 23), + unused_var('c', 'Foo', 2, 47) + ]) + + def variable_not_used_by_unreferenced_fragment(): + expect_fails_rule(NoUnusedVariablesRule, """ + query Foo($b: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + """, [ + unused_var('b', 'Foo', 2, 23), + ]) + + def variable_not_used_by_fragment_used_by_other_operation(): + expect_fails_rule(NoUnusedVariablesRule, """ + query Foo($b: String) { + ...FragA + } + query Bar($a: String) { + ...FragB + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + """, [ + unused_var('b', 'Foo', 2, 23), + unused_var('a', 'Bar', 5, 23), + ]) diff --git a/tests/validation/test_overlapping_fields_can_be_merged.py b/tests/validation/test_overlapping_fields_can_be_merged.py new file mode 100644 index 00000000..02f2792d --- /dev/null +++ b/tests/validation/test_overlapping_fields_can_be_merged.py @@ -0,0 +1,827 @@ +from graphql.type import ( + GraphQLField, GraphQLID, GraphQLInt, + GraphQLInterfaceType, GraphQLList, GraphQLNonNull, GraphQLObjectType, + GraphQLSchema, GraphQLString) +from graphql.validation import OverlappingFieldsCanBeMergedRule +from graphql.validation.rules.overlapping_fields_can_be_merged import ( + fields_conflict_message) + +from .harness import ( + expect_fails_rule, expect_fails_rule_with_schema, + expect_passes_rule, expect_passes_rule_with_schema) + + +def describe_validate_overlapping_fields_can_be_merged(): + + def unique_fields(): + 
expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment uniqueFields on Dog { + name + nickname + } + """) + + def identical_fields(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment mergeIdenticalFields on Dog { + name + name + } + """) + + def identical_fields_with_identical_args(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment mergeIdenticalFieldsWithIdenticalArgs on Dog { + doesKnowCommand(dogCommand: SIT) + doesKnowCommand(dogCommand: SIT) + } + """) + + def identical_fields_with_identical_directives(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment mergeSameFieldsWithSameDirectives on Dog { + name @include(if: true) + name @include(if: true) + } + """) + + def different_args_with_different_aliases(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment differentArgsWithDifferentAliases on Dog { + knowsSit: doesKnowCommand(dogCommand: SIT) + knowsDown: doesKnowCommand(dogCommand: DOWN) + } + """) + + def different_directives_with_different_aliases(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment differentDirectivesWithDifferentAliases on Dog { + nameIfTrue: name @include(if: true) + nameIfFalse: name @include(if: false) + } + """) + + def different_skip_or_include_directives_accepted(): + # Note: Differing skip/include directives don't create an ambiguous + # return value and are acceptable in conditions where differing runtime + # values may have the same desired effect of including/skipping a field + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment differentDirectivesWithDifferentAliases on Dog { + name @include(if: true) + name @include(if: false) + } + """) + + def same_aliases_with_different_field_targets(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + fragment sameAliasesWithDifferentFieldTargets on Dog { + fido: name + fido: nickname + } + """, [{ + 'message': fields_conflict_message( + 'fido', 'name and nickname are different fields'), + 'locations': [(3, 15), (4, 15)], 'path': None + }]) + + def same_aliases_allowed_on_non_overlapping_fields(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment sameAliasesWithDifferentFieldTargets on Pet { + ... on Dog { + name + } + ... 
on Cat { + name: nickname + } + } + """) + + def alias_masking_direct_field_access(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + fragment aliasMaskingDirectFieldAccess on Dog { + name: nickname + name + } + """, [{ + 'message': fields_conflict_message( + 'name', 'nickname and name are different fields'), + 'locations': [(3, 15), (4, 15)] + }]) + + def different_args_second_adds_an_argument(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + fragment conflictingArgs on Dog { + doesKnowCommand + doesKnowCommand(dogCommand: HEEL) + } + """, [{ + 'message': fields_conflict_message( + 'doesKnowCommand', 'they have differing arguments'), + 'locations': [(3, 15), (4, 15)] + }]) + + def different_args_second_missing_an_argument(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + fragment conflictingArgs on Dog { + doesKnowCommand(dogCommand: SIT) + doesKnowCommand + } + """, [{ + 'message': fields_conflict_message( + 'doesKnowCommand', 'they have differing arguments'), + 'locations': [(3, 15), (4, 15)] + }]) + + def conflicting_args(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + fragment conflictingArgs on Dog { + doesKnowCommand(dogCommand: SIT) + doesKnowCommand(dogCommand: HEEL) + } + """, [{ + 'message': fields_conflict_message( + 'doesKnowCommand', 'they have differing arguments'), + 'locations': [(3, 15), (4, 15)] + }]) + + def allows_different_args_where_no_conflict_is_possible(): + # This is valid since no object can be both a "Dog" and a "Cat", thus + # these fields can never overlap. + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment conflictingArgs on Pet { + ... on Dog { + name(surname: true) + } + ... on Cat { + name + } + } + """) + + def encounters_conflict_in_fragments(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + { + ...A + ...B + } + fragment A on Type { + x: a + } + fragment B on Type { + x: b + } + """, [{ + 'message': fields_conflict_message( + 'x', 'a and b are different fields'), + 'locations': [(7, 15), (10, 15)] + }]) + + def reports_each_conflict_once(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + { + f1 { + ...A + ...B + } + f2 { + ...B + ...A + } + f3 { + ...A + ...B + x: c + } + } + fragment A on Type { + x: a + } + fragment B on Type { + x: b + } + """, [{ + 'message': fields_conflict_message( + 'x', 'a and b are different fields'), + 'locations': [(18, 15), (21, 15)] + }, { + 'message': fields_conflict_message( + 'x', 'c and a are different fields'), + 'locations': [(14, 17), (18, 15)] + }, { + 'message': fields_conflict_message( + 'x', 'c and b are different fields'), + 'locations': [(14, 17), (21, 15)] + }]) + + def deep_conflict(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + { + field { + x: a + }, + field { + x: b + } + } + """, [{ + 'message': fields_conflict_message('field', [ + ('x', 'a and b are different fields')]), + 'locations': [(3, 15), (4, 17), (6, 15), (7, 17)] + }]) + + def deep_conflict_with_multiple_issues(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + { + field { + x: a + y: c + }, + field { + x: b + y: d + } + } + """, [{ + 'message': fields_conflict_message('field', [ + ('x', 'a and b are different fields'), + ('y', 'c and d are different fields')]), + 'locations': [ + (3, 15), (4, 17), (5, 17), (7, 15), (8, 17), (9, 17)], + 'path': None + }]) + + def very_deep_conflict(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + { + field { + deepField { + x: a + } + }, + field { + deepField { + x: b + } + 
} + } + """, [{ + 'message': fields_conflict_message('field', [ + ('deepField', [('x', 'a and b are different fields')])]), + 'locations': [ + (3, 15), (4, 17), (5, 19), (8, 15), (9, 17), (10, 19)], + 'path': None + }]) + + def reports_deep_conflict_to_nearest_common_ancestor(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + { + field { + deepField { + x: a + } + deepField { + x: b + } + }, + field { + deepField { + y + } + } + } + """, [{ + 'message': fields_conflict_message('deepField', [ + ('x', 'a and b are different fields')]), + 'locations': [(4, 17), (5, 19), (7, 17), (8, 19)] + }]) + + def reports_deep_conflict_to_nearest_common_ancestor_in_fragments(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + { + field { + ...F + } + field { + ...F + } + } + fragment F on T { + deepField { + deeperField { + x: a + } + deeperField { + x: b + } + }, + deepField { + deeperField { + y + } + } + } + """, [{ + 'message': fields_conflict_message('deeperField', [ + ('x', 'a and b are different fields')]), + 'locations': [ + (12, 17), (13, 19), (15, 17), (16, 19)] + }]) + + def reports_deep_conflict_in_nested_fragments(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + { + field { + ...F + }, + field { + ...I + } + } + fragment F on T { + x: a + ...G + } + fragment G on T { + y: c + } + fragment I on T { + y: d + ...J + } + fragment J on T { + x: b + } + """, [{ + 'message': fields_conflict_message('field', [ + ('x', 'a and b are different fields'), + ('y', 'c and d are different fields')]), + 'locations': [ + (3, 15), (11, 15), (15, 15), (6, 15), (22, 15), (18, 15)], + 'path': None + }]) + + def ignores_unknown_fragments(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + { + field + ...Unknown + ...Known + } + + fragment Known on T { + field + ...OtherUnknown + } + """) + + def describe_return_types_must_be_unambiguous(): + + SomeBox = GraphQLInterfaceType('SomeBox', lambda: { + 'deepBox': GraphQLField(SomeBox), + 'unrelatedField': GraphQLField(GraphQLString)}) + + StringBox = GraphQLObjectType('StringBox', lambda: { + 'scalar': GraphQLField(GraphQLString), + 'deepBox': GraphQLField(StringBox), + 'unrelatedField': GraphQLField(GraphQLString), + 'listStringBox': GraphQLField(GraphQLList(StringBox)), + 'stringBox': GraphQLField(StringBox), + 'intBox': GraphQLField(IntBox)}, + interfaces=[SomeBox]) + + IntBox = GraphQLObjectType('IntBox', lambda: { + 'scalar': GraphQLField(GraphQLInt), + 'deepBox': GraphQLField(IntBox), + 'unrelatedField': GraphQLField(GraphQLString), + 'listStringBox': GraphQLField(GraphQLList(StringBox)), + 'stringBox': GraphQLField(StringBox), + 'intBox': GraphQLField(IntBox)}, + interfaces=[SomeBox]) + + NonNullStringBox1 = GraphQLInterfaceType('NonNullStringBox1', { + 'scalar': GraphQLField(GraphQLNonNull(GraphQLString))}) + + NonNullStringBox1Impl = GraphQLObjectType('NonNullStringBox1Impl', { + 'scalar': GraphQLField(GraphQLNonNull(GraphQLString)), + 'deepBox': GraphQLField(StringBox), + 'unrelatedField': GraphQLField(GraphQLString)}, + interfaces=[SomeBox, NonNullStringBox1]) + + NonNullStringBox2 = GraphQLInterfaceType('NonNullStringBox2', { + 'scalar': GraphQLField(GraphQLNonNull(GraphQLString))}) + + NonNullStringBox2Impl = GraphQLObjectType('NonNullStringBox2Impl', { + 'scalar': GraphQLField(GraphQLNonNull(GraphQLString)), + 'unrelatedField': GraphQLField(GraphQLString), + 'deepBox': GraphQLField(StringBox), + }, interfaces=[SomeBox, NonNullStringBox2]) + + Connection = GraphQLObjectType('Connection', { + 'edges': 
GraphQLField(GraphQLList(GraphQLObjectType('Edge', { + 'node': GraphQLField(GraphQLObjectType('Node', { + 'id': GraphQLField(GraphQLID), + 'name': GraphQLField(GraphQLString)}))})))}) + + schema = GraphQLSchema( + GraphQLObjectType('QueryRoot', { + 'someBox': GraphQLField(SomeBox), + 'connection': GraphQLField(Connection)}), + types=[IntBox, StringBox, + NonNullStringBox1Impl, NonNullStringBox2Impl]) + + def conflicting_return_types_which_potentially_overlap(): + # This is invalid since an object could potentially be both the + # Object type IntBox and the interface type NonNullStringBox1. + # While that condition does not exist in the current schema, the + # schema could expand in the future to allow this. + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ...on IntBox { + scalar + } + ...on NonNullStringBox1 { + scalar + } + } + } + """, [{ + 'message': fields_conflict_message( + 'scalar', + 'they return conflicting types Int and String!'), + 'locations': [(5, 27), (8, 27)] + }]) + + def compatible_return_shapes_on_different_return_types(): + # In this case `deepBox` returns `SomeBox` in the first usage, and + # `StringBox` in the second usage. These types are not the same! + # However this is valid because the return *shapes* are compatible. + expect_passes_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on SomeBox { + deepBox { + unrelatedField + } + } + ... on StringBox { + deepBox { + unrelatedField + } + } + } + } + """) + + def disallows_differing_return_types_despite_no_overlap(): + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on IntBox { + scalar + } + ... on StringBox { + scalar + } + } + } + """, [{ + 'message': fields_conflict_message( + 'scalar', + 'they return conflicting types Int and String'), + 'locations': [(5, 27), (8, 27)] + }]) + + def reports_correctly_when_a_non_exclusive_follows_an_exclusive(): + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on IntBox { + deepBox { + ...X + } + } + } + someBox { + ... on StringBox { + deepBox { + ...Y + } + } + } + memoed: someBox { + ... on IntBox { + deepBox { + ...X + } + } + } + memoed: someBox { + ... on StringBox { + deepBox { + ...Y + } + } + } + other: someBox { + ...X + } + other: someBox { + ...Y + } + } + fragment X on SomeBox { + scalar + } + fragment Y on SomeBox { + scalar: unrelatedField + } + """, [{ + 'message': fields_conflict_message('other', [ + ('scalar', + 'scalar and unrelatedField are different fields')]), + 'locations': [(31, 23), (39, 23), (34, 23), (42, 23)], + 'path': None + }]) + + def disallows_differing_return_type_nullability_despite_no_overlap(): + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on NonNullStringBox1 { + scalar + } + ... on StringBox { + scalar + } + } + } + """, [{ + 'message': fields_conflict_message( + 'scalar', + 'they return conflicting types String! and String'), + 'locations': [(5, 27), (8, 27)] + }]) + + def disallows_differing_return_type_list_despite_no_overlap_1(): + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on IntBox { + box: listStringBox { + scalar + } + } + ... 
on StringBox { + box: stringBox { + scalar + } + } + } + } + """, [{ + 'message': fields_conflict_message( + 'box', 'they return conflicting types' + ' [StringBox] and StringBox'), + 'locations': [(5, 27), (10, 27)] + }]) + + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on IntBox { + box: stringBox { + scalar + } + } + ... on StringBox { + box: listStringBox { + scalar + } + } + } + } + """, [{ + 'message': fields_conflict_message( + 'box', 'they return conflicting types' + ' StringBox and [StringBox]'), + 'locations': [(5, 27), (10, 27)] + }]) + + def disallows_differing_subfields(): + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on IntBox { + box: stringBox { + val: scalar + val: unrelatedField + } + } + ... on StringBox { + box: stringBox { + val: scalar + } + } + } + } + """, [{ + 'message': fields_conflict_message( + 'val', + 'scalar and unrelatedField are different fields'), + 'locations': [(6, 29), (7, 29)] + }]) + + def disallows_differing_deep_return_types_despite_no_overlap(): + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on IntBox { + box: stringBox { + scalar + } + } + ... on StringBox { + box: intBox { + scalar + } + } + } + } + """, [{ + 'message': fields_conflict_message('box', [ + ('scalar', + 'they return conflicting types String and Int')]), + 'locations': [(5, 27), (6, 29), (10, 27), (11, 29)], + 'path': None + }]) + + def allows_non_conflicting_overlapping_types(): + expect_passes_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ... on IntBox { + scalar: unrelatedField + } + ... on StringBox { + scalar + } + } + } + """) + + def same_wrapped_scalar_return_types(): + expect_passes_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ...on NonNullStringBox1 { + scalar + } + ...on NonNullStringBox2 { + scalar + } + } + } + """) + + def allows_inline_typeless_fragments(): + expect_passes_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + a + ... { + a + } + } + """) + + def compares_deep_types_including_list(): + expect_fails_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + connection { + ...edgeID + edges { + node { + id: name + } + } + } + } + + fragment edgeID on Connection { + edges { + node { + id + } + } + } + """, [{ + 'message': fields_conflict_message('edges', [ + ('node', [ + ('id', 'name and id are different fields')])]), + 'locations': [ + (5, 25), (6, 27), (7, 29), + (14, 23), (15, 25), (16, 27)], + 'path': None + }]) + + def ignores_unknown_types(): + expect_passes_rule_with_schema( + schema, OverlappingFieldsCanBeMergedRule, """ + { + someBox { + ...on UnknownType { + scalar + } + ...on NonNullStringBox2 { + scalar + } + } + } + """) + + def error_message_contains_hint_for_alias_conflict(): + # The error template should end with a hint for the user to try + # using different aliases. + error = fields_conflict_message( + 'x', 'a and b are different fields') + assert error == ( + "Fields 'x' conflict because a and b are different fields." 
+ ' Use different aliases on the fields to fetch both' + ' if this was intentional.') + + def works_for_field_names_that_are_js_keywords(): + FooType = GraphQLObjectType('Foo', { + 'constructor': GraphQLField(GraphQLString)}) + + schema_with_keywords = GraphQLSchema( + GraphQLObjectType('query', lambda: { + 'foo': GraphQLField(FooType)})) + + expect_passes_rule_with_schema( + schema_with_keywords, OverlappingFieldsCanBeMergedRule, """ + { + foo { + constructor + } + } + """) + + def works_for_field_names_that_are_python_keywords(): + FooType = GraphQLObjectType('Foo', { + 'class': GraphQLField(GraphQLString)}) + + schema_with_keywords = GraphQLSchema( + GraphQLObjectType('query', lambda: { + 'foo': GraphQLField(FooType)})) + + expect_passes_rule_with_schema( + schema_with_keywords, OverlappingFieldsCanBeMergedRule, """ + { + foo { + class + } + } + """) + + def does_not_infinite_loop_on_recursive_fragments(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment fragA on Human { name, relatives { name, ...fragA } } + """) + + def does_not_infinite_loop_on_immediately_recursive_fragments(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment fragA on Human { name, ...fragA } + """) + + def does_not_infinite_loop_on_transitively_recursive_fragments(): + expect_passes_rule(OverlappingFieldsCanBeMergedRule, """ + fragment fragA on Human { name, ...fragB } + fragment fragB on Human { name, ...fragC } + fragment fragC on Human { name, ...fragA } + """) + + def finds_invalid_case_even_with_immediately_recursive_fragment(): + expect_fails_rule(OverlappingFieldsCanBeMergedRule, """ + fragment sameAliasesWithDifferentFieldTargets on Dog { + ...sameAliasesWithDifferentFieldTargets + fido: name + fido: nickname + } + """, [{ + 'message': fields_conflict_message( + 'fido', 'name and nickname are different fields'), + 'locations': [(4, 15), (5, 15)] + }]) diff --git a/tests/validation/test_possible_fragment_spreads.py b/tests/validation/test_possible_fragment_spreads.py new file mode 100644 index 00000000..274742a9 --- /dev/null +++ b/tests/validation/test_possible_fragment_spreads.py @@ -0,0 +1,182 @@ +from graphql.validation import PossibleFragmentSpreadsRule +from graphql.validation.rules.possible_fragment_spreads import ( + type_incompatible_spread_message, + type_incompatible_anon_spread_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def error(frag_name, parent_type, frag_type, line, column): + return { + 'message': type_incompatible_spread_message( + frag_name, parent_type, frag_type), + 'locations': [(line, column)]} + + +def error_anon(parent_type, frag_type, line, column): + return { + 'message': type_incompatible_anon_spread_message( + parent_type, frag_type), + 'locations': [(line, column)]} + + +def describe_validate_possible_fragment_spreads(): + + def of_the_same_object(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment objectWithinObject on Dog { ...dogFragment } + fragment dogFragment on Dog { barkVolume } + """) + + def of_the_same_object_inline_fragment(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment objectWithinObjectAnon on Dog { ... 
on Dog { barkVolume } } + """) # noqa + + def object_into_implemented_interface(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment objectWithinInterface on Pet { ...dogFragment } + fragment dogFragment on Dog { barkVolume } + """) + + def object_into_containing_union(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment objectWithinUnion on CatOrDog { ...dogFragment } + fragment dogFragment on Dog { barkVolume } + """) + + def union_into_contained_object(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment unionWithinObject on Dog { ...catOrDogFragment } + fragment catOrDogFragment on CatOrDog { __typename } + """) + + def union_into_overlapping_interface(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment unionWithinInterface on Pet { ...catOrDogFragment } + fragment catOrDogFragment on CatOrDog { __typename } + """) + + def union_into_overlapping_union(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment unionWithinUnion on DogOrHuman { ...catOrDogFragment } + fragment catOrDogFragment on CatOrDog { __typename } + """) + + def interface_into_implemented_object(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment interfaceWithinObject on Dog { ...petFragment } + fragment petFragment on Pet { name } + """) + + def interface_into_overlapping_interface(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment interfaceWithinInterface on Pet { ...beingFragment } + fragment beingFragment on Being { name } + """) + + def interface_into_overlapping_interface_in_inline_fragment(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment interfaceWithinInterface on Pet { ... on Being { name } } + """) + + def interface_into_overlapping_union(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment interfaceWithinUnion on CatOrDog { ...petFragment } + fragment petFragment on Pet { name } + """) + + def ignores_incorrect_type_caught_by_fragments_on_composite_types(): + expect_passes_rule(PossibleFragmentSpreadsRule, """ + fragment petFragment on Pet { ...badInADifferentWay } + fragment badInADifferentWay on String { name } + """) + + def different_object_into_object(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidObjectWithinObject on Cat { ...dogFragment } + fragment dogFragment on Dog { barkVolume } + """, [ + error('dogFragment', 'Cat', 'Dog', 2, 57) + ]) + + def different_object_into_object_in_inline_fragment(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidObjectWithinObjectAnon on Cat { + ... 
on Dog { barkVolume } + } + """, [ + error_anon('Cat', 'Dog', 3, 15) + ]) + + def object_into_not_implementing_interface(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidObjectWithinInterface on Pet { ...humanFragment } + fragment humanFragment on Human { pets { name } } + """, [ + error('humanFragment', 'Pet', 'Human', 2, 60) + ]) + + def object_into_not_containing_union(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidObjectWithinUnion on CatOrDog { ...humanFragment } + fragment humanFragment on Human { pets { name } } + """, [error('humanFragment', 'CatOrDog', 'Human', 2, 61)]) + + def union_into_not_contained_object(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidUnionWithinObject on Human { ...catOrDogFragment } + fragment catOrDogFragment on CatOrDog { __typename } + """, [ + error('catOrDogFragment', 'Human', 'CatOrDog', 2, 58)]) + + def union_into_non_overlapping_interface(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidUnionWithinInterface on Pet { ...humanOrAlienFragment } + fragment humanOrAlienFragment on HumanOrAlien { __typename } + """, [ # noqa + error('humanOrAlienFragment', 'Pet', 'HumanOrAlien', 2, 59) + ]) + + def union_into_non_overlapping_union(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidUnionWithinUnion on CatOrDog { ...humanOrAlienFragment } + fragment humanOrAlienFragment on HumanOrAlien { __typename } + """, [ # noqa + error('humanOrAlienFragment', 'CatOrDog', 'HumanOrAlien', 2, 60) + ]) + + def interface_into_non_implementing_object(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidInterfaceWithinObject on Cat { ...intelligentFragment } + fragment intelligentFragment on Intelligent { iq } + """, [ # noqa + error('intelligentFragment', 'Cat', 'Intelligent', 2, 60) + ]) + + def interface_into_non_overlapping_interface(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidInterfaceWithinInterface on Pet { + ...intelligentFragment + } + fragment intelligentFragment on Intelligent { iq } + """, [ + error('intelligentFragment', 'Pet', 'Intelligent', 3, 15) + ]) + + def interface_into_non_overlapping_interface_in_inline_fragment(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidInterfaceWithinInterfaceAnon on Pet { + ...on Intelligent { iq } + } + """, [ + error_anon('Pet', 'Intelligent', 3, 15) + ]) + + def interface_into_non_overlapping_union(): + expect_fails_rule(PossibleFragmentSpreadsRule, """ + fragment invalidInterfaceWithinUnion on HumanOrAlien { ...petFragment } + fragment petFragment on Pet { name } + """, [ # noqa + error('petFragment', 'HumanOrAlien', 'Pet', 2, 68) + ]) diff --git a/tests/validation/test_provided_required_arguments.py b/tests/validation/test_provided_required_arguments.py new file mode 100644 index 00000000..4ef69d10 --- /dev/null +++ b/tests/validation/test_provided_required_arguments.py @@ -0,0 +1,196 @@ +from graphql.validation import ProvidedRequiredArgumentsRule +from graphql.validation.rules.provided_required_arguments import ( + missing_field_arg_message, missing_directive_arg_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def missing_field_arg(field_name, arg_name, type_name, line, column): + return { + 'message': missing_field_arg_message(field_name, arg_name, type_name), + 'locations': [(line, column)]} + + +def missing_directive_arg(directive_name, arg_name, type_name, line, column): + return { + 
'message': missing_directive_arg_message( + directive_name, arg_name, type_name), + 'locations': [(line, column)]} + + +def describe_validate_provided_required_arguments(): + + def ignores_unknown_arguments(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + dog { + isHousetrained(unknownArgument: true) + } + }""") + + def describe_valid_non_nullable_value(): + + def arg_on_optional_arg(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + dog { + isHousetrained(atOtherHomes: true) + } + }""") + + def no_arg_on_optional_arg(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + dog { + isHousetrained + } + }""") + + def no_arg_on_non_null_field_with_default(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + nonNullFieldWithDefault + } + }""") + + def multiple_args(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleReqs(req1: 1, req2: 2) + } + } + """) + + def multiple_args_reverse_order(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleReqs(req2: 2, req1: 1) + } + } + """) + + def no_args_on_multiple_optional(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleOpts + } + } + """) + + def one_arg_on_multiple_optional(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleOpts(opt1: 1) + } + } + """) + + def second_arg_on_multiple_optional(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleOpts(opt2: 1) + } + } + """) + + def multiple_reqs_on_mixed_list(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4) + } + } + """) + + def multiple_reqs_and_one_opt_on_mixed_list(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4, opt1: 5) + } + } + """) + + def all_reqs_and_opts_on_mixed_list(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6) + } + } + """) + + def describe_invalid_non_nullable_value(): + + def missing_one_non_nullable_argument(): + expect_fails_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleReqs(req2: 2) + } + } + """, [ + missing_field_arg('multipleReqs', 'req1', 'Int!', 4, 21) + ]) + + def missing_multiple_non_nullable_arguments(): + expect_fails_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleReqs + } + } + """, [ + missing_field_arg('multipleReqs', 'req1', 'Int!', 4, 21), + missing_field_arg('multipleReqs', 'req2', 'Int!', 4, 21) + ]) + + def incorrect_value_and_missing_argument(): + expect_fails_rule(ProvidedRequiredArgumentsRule, """ + { + complicatedArgs { + multipleReqs(req1: "one") + } + } + """, [ + missing_field_arg('multipleReqs', 'req2', 'Int!', 4, 21) + ]) + + def describe_directive_arguments(): + + def ignores_unknown_directives(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + dog @unknown + } + """) + + def with_directives_of_valid_type(): + expect_passes_rule(ProvidedRequiredArgumentsRule, """ + { + dog @include(if: true) { + name + } + human @skip(if: false) { + name + } + } + """) + + def with_directive_with_missing_types(): + expect_fails_rule(ProvidedRequiredArgumentsRule, """ + { + dog @include { + name @skip + } + } + """, [ + missing_directive_arg('include', 'if', 'Boolean!', 3, 23), + 
missing_directive_arg('skip', 'if', 'Boolean!', 4, 26), + ]) diff --git a/tests/validation/test_scalar_leafs.py b/tests/validation/test_scalar_leafs.py new file mode 100644 index 00000000..6168c9c9 --- /dev/null +++ b/tests/validation/test_scalar_leafs.py @@ -0,0 +1,97 @@ +from graphql.validation import ScalarLeafsRule +from graphql.validation.rules.scalar_leafs import ( + no_subselection_allowed_message, required_subselection_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def no_scalar_subselection(field, type_, line, column): + return { + 'message': no_subselection_allowed_message(field, type_), + 'locations': [(line, column)]} + + +def missing_obj_subselection(field, type_, line, column): + return { + 'message': required_subselection_message(field, type_), + 'locations': [(line, column)]} + + +def describe_validate_scalar_leafs(): + + def valid_scalar_selection(): + expect_passes_rule(ScalarLeafsRule, """ + fragment scalarSelection on Dog { + barks + } + """) + + def object_type_missing_selection(): + expect_fails_rule(ScalarLeafsRule, """ + query directQueryOnObjectWithoutSubFields { + human + } + """, [ + missing_obj_subselection('human', 'Human', 3, 15) + ]) + + def interface_type_missing_selection(): + expect_fails_rule(ScalarLeafsRule, """ + { + human { pets } + } + """, [ + missing_obj_subselection('pets', '[Pet]', 3, 23) + ]) + + def valid_scalar_selection_with_args(): + expect_passes_rule(ScalarLeafsRule, """ + fragment scalarSelectionWithArgs on Dog { + doesKnowCommand(dogCommand: SIT) + } + """) + + def scalar_selection_not_allowed_on_boolean(): + expect_fails_rule(ScalarLeafsRule, """ + fragment scalarSelectionsNotAllowedOnBoolean on Dog { + barks { sinceWhen } + } + """, [ + no_scalar_subselection('barks', 'Boolean', 3, 21) + ]) + + def scalar_selection_not_allowed_on_enum(): + expect_fails_rule(ScalarLeafsRule, """ + fragment scalarSelectionsNotAllowedOnEnum on Cat { + furColor { inHexdec } + } + """, [ + no_scalar_subselection('furColor', 'FurColor', 3, 24) + ]) + + def scalar_selection_not_allowed_with_args(): + expect_fails_rule(ScalarLeafsRule, """ + fragment scalarSelectionsNotAllowedWithArgs on Dog { + doesKnowCommand(dogCommand: SIT) { sinceWhen } + } + """, [ + no_scalar_subselection('doesKnowCommand', 'Boolean', 3, 48) + ]) + + def scalar_selection_not_allowed_with_directives(): + expect_fails_rule(ScalarLeafsRule, """ + fragment scalarSelectionsNotAllowedWithDirectives on Dog { + name @include(if: true) { isAlsoHumanName } + } + """, [ + no_scalar_subselection('name', 'String', 3, 39) + ]) + + def scalar_selection_not_allowed_with_directives_and_args(): + expect_fails_rule(ScalarLeafsRule, """ + fragment scalarSelectionsNotAllowedWithDirectivesAndArgs on Dog { + doesKnowCommand(dogCommand: SIT) @include(if: true) { sinceWhen } + } + """, [ + no_scalar_subselection('doesKnowCommand', 'Boolean', 3, 67) + ]) diff --git a/tests/validation/test_single_field_subscriptions.py b/tests/validation/test_single_field_subscriptions.py new file mode 100644 index 00000000..69641461 --- /dev/null +++ b/tests/validation/test_single_field_subscriptions.py @@ -0,0 +1,60 @@ +from graphql.validation import SingleFieldSubscriptionsRule +from graphql.validation.rules.single_field_subscriptions import ( + single_field_only_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def describe_validate_subscriptions_with_single_field(): + + def valid_subscription(): + expect_passes_rule(SingleFieldSubscriptionsRule, """ + subscription 
ImportantEmails { + importantEmails + } + """) + + def fails_with_more_than_one_root_field(): + expect_fails_rule(SingleFieldSubscriptionsRule, """ + subscription ImportantEmails { + importantEmails + notImportantEmails + } + """, [{ + 'message': single_field_only_message('ImportantEmails'), + 'locations': [(4, 15)] + }]) + + def fails_with_more_than_one_root_field_including_introspection(): + expect_fails_rule(SingleFieldSubscriptionsRule, """ + subscription ImportantEmails { + importantEmails + __typename + } + """, [{ + 'message': single_field_only_message('ImportantEmails'), + 'locations': [(4, 15)] + }]) + + def fails_with_many_more_than_one_root_field(): + expect_fails_rule(SingleFieldSubscriptionsRule, """ + subscription ImportantEmails { + importantEmails + notImportantEmails + spamEmails + } + """, [{ + 'message': single_field_only_message('ImportantEmails'), + 'locations': [(4, 15), (5, 15)] + }]) + + def fails_with_more_than_one_root_field_in_anonymous_subscriptions(): + expect_fails_rule(SingleFieldSubscriptionsRule, """ + subscription { + importantEmails + notImportantEmails + } + """, [{ + 'message': single_field_only_message(None), + 'locations': [(4, 15)] + }]) diff --git a/tests/validation/test_unique_argument_names.py b/tests/validation/test_unique_argument_names.py new file mode 100644 index 00000000..951ed5fc --- /dev/null +++ b/tests/validation/test_unique_argument_names.py @@ -0,0 +1,116 @@ +from graphql.validation import UniqueArgumentNamesRule +from graphql.validation.rules.unique_argument_names import ( + duplicate_arg_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def duplicate_arg(arg_name, l1, c1, l2, c2): + return { + 'message': duplicate_arg_message(arg_name), + 'locations': [(l1, c1), (l2, c2)]} + + +def describe_validate_unique_argument_names(): + + def no_arguments_on_field(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + field + } + """) + + def no_arguments_on_directive(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + field @directive + } + """) + + def argument_on_field(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + field(arg: "value") + } + """) + + def argument_on_directive(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + field @directive(arg: "value") + } + """) + + def same_argument_on_two_fields(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + one: field(arg: "value") + two: field(arg: "value") + } + """) + + def same_argument_on_field_and_directive(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + field(arg: "value") @directive(arg: "value") + } + """) + + def same_argument_on_two_directives(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + field @directive1(arg: "value") @directive2(arg: "value") + } + """) + + def multiple_field_arguments(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + field(arg1: "value", arg2: "value", arg3: "value") + } + """) + + def multiple_directive_arguments(): + expect_passes_rule(UniqueArgumentNamesRule, """ + { + field @directive(arg1: "value", arg2: "value", arg3: "value") + } + """) + + def duplicate_field_arguments(): + expect_fails_rule(UniqueArgumentNamesRule, """ + { + field(arg1: "value", arg1: "value") + } + """, [ + duplicate_arg('arg1', 3, 21, 3, 36) + ]) + + def many_duplicate_field_arguments(): + expect_fails_rule(UniqueArgumentNamesRule, """ + { + field(arg1: "value", arg1: "value", arg1: "value") + } + """, [ + duplicate_arg('arg1', 3, 21, 3, 36), + duplicate_arg('arg1', 3, 21, 3, 51) + ]) + + def 
duplicate_directive_arguments(): + expect_fails_rule(UniqueArgumentNamesRule, """ + { + field @directive(arg1: "value", arg1: "value") + } + """, [ + duplicate_arg('arg1', 3, 32, 3, 47) + ]) + + def many_duplicate_directive_arguments(): + expect_fails_rule(UniqueArgumentNamesRule, """ + { + field @directive(arg1: "value", arg1: "value", arg1: "value") + } + """, [ + duplicate_arg('arg1', 3, 32, 3, 47), + duplicate_arg('arg1', 3, 32, 3, 62) + ]) diff --git a/tests/validation/test_unique_directives_per_location.py b/tests/validation/test_unique_directives_per_location.py new file mode 100644 index 00000000..a538bf18 --- /dev/null +++ b/tests/validation/test_unique_directives_per_location.py @@ -0,0 +1,80 @@ +from graphql.validation import UniqueDirectivesPerLocationRule +from graphql.validation.rules.unique_directives_per_location import ( + duplicate_directive_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def duplicate_directive(directive_name, l1, c1, l2, c2): + return { + 'message': duplicate_directive_message(directive_name), + 'locations': [(l1, c1), (l2, c2)]} + + +def describe_validate_directives_are_unique_per_location(): + + def no_directives(): + expect_passes_rule(UniqueDirectivesPerLocationRule, """ + { + field + } + """) + + def unique_directives_in_different_locations(): + expect_passes_rule(UniqueDirectivesPerLocationRule, """ + fragment Test on Type @directiveA { + field @directiveB + } + """) + + def unique_directives_in_same_locations(): + expect_passes_rule(UniqueDirectivesPerLocationRule, """ + fragment Test on Type @directiveA @directiveB { + field @directiveA @directiveB + } + """) + + def same_directives_in_different_locations(): + expect_passes_rule(UniqueDirectivesPerLocationRule, """ + fragment Test on Type @directiveA { + field @directiveA + } + """) + + def same_directives_in_similar_locations(): + expect_passes_rule(UniqueDirectivesPerLocationRule, """ + fragment Test on Type { + field @directive + field @directive + } + """) + + def duplicate_directives_in_one_location(): + expect_fails_rule(UniqueDirectivesPerLocationRule, """ + fragment Test on Type { + field @directive @directive @directive + } + """, [ + duplicate_directive('directive', 3, 21, 3, 32), + duplicate_directive('directive', 3, 21, 3, 43), + ]) + + def different_duplicate_directives_in_one_location(): + expect_fails_rule(UniqueDirectivesPerLocationRule, """ + fragment Test on Type { + field @directiveA @directiveB @directiveA @directiveB + } + """, [ + duplicate_directive('directiveA', 3, 21, 3, 45), + duplicate_directive('directiveB', 3, 33, 3, 57), + ]) + + def duplicate_directives_in_many_locations(): + expect_fails_rule(UniqueDirectivesPerLocationRule, """ + fragment Test on Type @directive @directive { + field @directive @directive + } + """, [ + duplicate_directive('directive', 2, 35, 2, 46), + duplicate_directive('directive', 3, 21, 3, 32), + ]) diff --git a/tests/validation/test_unique_fragment_names.py b/tests/validation/test_unique_fragment_names.py new file mode 100644 index 00000000..f6536720 --- /dev/null +++ b/tests/validation/test_unique_fragment_names.py @@ -0,0 +1,98 @@ +from graphql.validation import UniqueFragmentNamesRule +from graphql.validation.rules.unique_fragment_names import ( + duplicate_fragment_name_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def duplicate_fragment(frag_name, l1, c1, l2, c2): + return { + 'message': duplicate_fragment_name_message(frag_name), + 'locations': [(l1, c1), (l2, c2)]} + 
+ +def describe_validate_unique_fragment_names(): + + def no_fragments(): + expect_passes_rule(UniqueFragmentNamesRule, """ + { + field + } + """) + + def one_fragment(): + expect_passes_rule(UniqueFragmentNamesRule, """ + { + ...fragA + } + fragment fragA on Type { + field + } + """) + + def many_fragments(): + expect_passes_rule(UniqueFragmentNamesRule, """ + { + ...fragA + ...fragB + ...fragC + } + fragment fragA on Type { + fieldA + } + fragment fragB on Type { + fieldB + } + fragment fragC on Type { + fieldC + } + """) + + def inline_fragments_are_always_unique(): + expect_passes_rule(UniqueFragmentNamesRule, """ + { + ...on Type { + fieldA + } + ...on Type { + fieldB + } + } + """) + + def fragment_and_operation_named_the_same(): + expect_passes_rule(UniqueFragmentNamesRule, """ + query Foo { + ...Foo + } + fragment Foo on Type { + field + } + """) + + def fragments_named_the_same(): + expect_fails_rule(UniqueFragmentNamesRule, """ + { + ...fragA + } + fragment fragA on Type { + fieldA + } + fragment fragA on Type { + fieldB + } + """, [ + duplicate_fragment('fragA', 5, 24, 8, 24) + ]) + + def fragments_named_the_same_without_being_referenced(): + expect_fails_rule(UniqueFragmentNamesRule, """ + fragment fragA on Type { + fieldA + } + fragment fragA on Type { + fieldB + } + """, [ + duplicate_fragment('fragA', 2, 22, 5, 22) + ]) diff --git a/tests/validation/test_unique_input_field_names.py b/tests/validation/test_unique_input_field_names.py new file mode 100644 index 00000000..504299d6 --- /dev/null +++ b/tests/validation/test_unique_input_field_names.py @@ -0,0 +1,69 @@ +from graphql.validation import UniqueInputFieldNamesRule +from graphql.validation.rules.unique_input_field_names import ( + duplicate_input_field_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def duplicate_field(name, l1, c1, l2, c2): + return { + 'message': duplicate_input_field_message(name), + 'locations': [(l1, c1), (l2, c2)]} + + +def describe_validate_unique_input_field_names(): + + def input_object_with_fields(): + expect_passes_rule(UniqueInputFieldNamesRule, """ + { + field(arg: { f: true }) + } + """) + + def same_input_object_within_two_args(): + expect_passes_rule(UniqueInputFieldNamesRule, """ + { + field(arg1: { f: true }, arg2: { f: true }) + } + """) + + def multiple_input_object_fields(): + expect_passes_rule(UniqueInputFieldNamesRule, """ + { + field(arg: { f1: "value", f2: "value", f3: "value" }) + } + """) + + def allows_for_nested_input_objects_with_similar_fields(): + expect_passes_rule(UniqueInputFieldNamesRule, """ + { + field(arg: { + deep: { + deep: { + id: 1 + } + id: 1 + } + id: 1 + }) + } + """) + + def duplicate_input_object_fields(): + expect_fails_rule(UniqueInputFieldNamesRule, """ + { + field(arg: { f1: "value", f1: "value" }) + } + """, [ + duplicate_field('f1', 3, 28, 3, 41) + ]) + + def many_duplicate_input_object_fields(): + expect_fails_rule(UniqueInputFieldNamesRule, """ + { + field(arg: { f1: "value", f1: "value", f1: "value" }) + } + """, [ + duplicate_field('f1', 3, 28, 3, 41), + duplicate_field('f1', 3, 28, 3, 54) + ]) diff --git a/tests/validation/test_unique_operation_names.py b/tests/validation/test_unique_operation_names.py new file mode 100644 index 00000000..2e88046a --- /dev/null +++ b/tests/validation/test_unique_operation_names.py @@ -0,0 +1,107 @@ +from graphql.validation import UniqueOperationNamesRule +from graphql.validation.rules.unique_operation_names import ( + duplicate_operation_name_message) + +from .harness import 
expect_fails_rule, expect_passes_rule + + +def duplicate_op(op_name, l1, c1, l2, c2): + return { + 'message': duplicate_operation_name_message(op_name), + 'locations': [(l1, c1), (l2, c2)]} + + +def describe_validate_unique_operation_names(): + + def no_operations(): + expect_passes_rule(UniqueOperationNamesRule, """ + fragment fragA on Type { + field + } + """) + + def one_anon_operation(): + expect_passes_rule(UniqueOperationNamesRule, """ + { + field + } + """) + + def one_named_operation(): + expect_passes_rule(UniqueOperationNamesRule, """ + query Foo { + field + } + """) + + def multiple_operations(): + expect_passes_rule(UniqueOperationNamesRule, """ + query Foo { + field + } + + query Bar { + field + } + """) + + def multiple_operations_of_different_types(): + expect_passes_rule(UniqueOperationNamesRule, """ + query Foo { + field + } + + mutation Bar { + field + } + + subscription Baz { + field + } + """) + + def fragment_and_operation_named_the_same(): + expect_passes_rule(UniqueOperationNamesRule, """ + query Foo { + ...Foo + } + fragment Foo on Type { + field + } + """) + + def multiple_operations_of_same_name(): + expect_fails_rule(UniqueOperationNamesRule, """ + query Foo { + fieldA + } + query Foo { + fieldB + } + """, [ + duplicate_op('Foo', 2, 19, 5, 19), + ]) + + def multiple_ops_of_same_name_of_different_types_mutation(): + expect_fails_rule(UniqueOperationNamesRule, """ + query Foo { + fieldA + } + mutation Foo { + fieldB + } + """, [ + duplicate_op('Foo', 2, 19, 5, 22), + ]) + + def multiple_ops_of_same_name_of_different_types_subscription(): + expect_fails_rule(UniqueOperationNamesRule, """ + query Foo { + fieldA + } + subscription Foo { + fieldB + } + """, [ + duplicate_op('Foo', 2, 19, 5, 26), + ]) diff --git a/tests/validation/test_unique_variable_names.py b/tests/validation/test_unique_variable_names.py new file mode 100644 index 00000000..b0da79de --- /dev/null +++ b/tests/validation/test_unique_variable_names.py @@ -0,0 +1,32 @@ +from graphql.validation import UniqueVariableNamesRule +from graphql.validation.rules.unique_variable_names import ( + duplicate_variable_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def duplicate_variable(name, l1, c1, l2, c2): + return { + 'message': duplicate_variable_message(name), + 'locations': [(l1, c1), (l2, c2)]} + + +def describe_validate_unique_variable_names(): + + def unique_variable_names(): + expect_passes_rule(UniqueVariableNamesRule, """ + query A($x: Int, $y: String) { __typename } + query B($x: String, $y: Int) { __typename } + """) + + def duplicate_variable_names(): + expect_fails_rule(UniqueVariableNamesRule, """ + query A($x: Int, $x: Int, $x: String) { __typename } + query B($x: String, $x: Int) { __typename } + query C($x: Int, $x: Int) { __typename } + """, [ + duplicate_variable('x', 2, 22, 2, 31), + duplicate_variable('x', 2, 22, 2, 40), + duplicate_variable('x', 3, 22, 3, 34), + duplicate_variable('x', 4, 22, 4, 31), + ]) diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py new file mode 100644 index 00000000..eb23fcf2 --- /dev/null +++ b/tests/validation/test_validation.py @@ -0,0 +1,68 @@ +from graphql.language import parse +from graphql.utilities import TypeInfo +from graphql.validation import specified_rules, validate + +from .harness import test_schema + + +def expect_valid(schema, query_string): + errors = validate(schema, parse(query_string)) + assert not errors, 'Should validate' + + +def describe_validate_supports_full_validation(): + + 
def validates_queries(): + expect_valid(test_schema, """ + query { + catOrDog { + ... on Cat { + furColor + } + ... on Dog { + isHousetrained + } + } + } + """) + + def detects_bad_scalar_parse(): + doc = """ + query { + invalidArg(arg: "bad value") + } + """ + + errors = validate(test_schema, parse(doc)) + assert errors == [{ + 'message': 'Expected type Invalid, found "bad value";' + ' Invalid scalar is always invalid: bad value', + 'locations': [(3, 31)]}] + + # NOTE: experimental + def validates_using_a_custom_type_info(): + # This TypeInfo will never return a valid field. + type_info = TypeInfo(test_schema, lambda *args: None) + + ast = parse(""" + query { + catOrDog { + ... on Cat { + furColor + } + ... on Dog { + isHousetrained + } + } + } + """) + + errors = validate(test_schema, ast, specified_rules, type_info) + + assert [error.message for error in errors] == [ + "Cannot query field 'catOrDog' on type 'QueryRoot'." + " Did you mean 'catOrDog'?", + "Cannot query field 'furColor' on type 'Cat'." + " Did you mean 'furColor'?", + "Cannot query field 'isHousetrained' on type 'Dog'." + " Did you mean 'isHousetrained'?"] diff --git a/tests/validation/test_values_of_correct_type.py b/tests/validation/test_values_of_correct_type.py new file mode 100644 index 00000000..7ac17889 --- /dev/null +++ b/tests/validation/test_values_of_correct_type.py @@ -0,0 +1,884 @@ +from graphql.validation import ValuesOfCorrectTypeRule +from graphql.validation.rules.values_of_correct_type import ( + bad_value_message, required_field_message, unknown_field_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def bad_value(type_name, value, line, column, message=None): + return { + 'message': bad_value_message(type_name, value, message), + 'locations': [(line, column)]} + + +def required_field(type_name, field_name, field_type_name, line, column): + return { + 'message': required_field_message( + type_name, field_name, field_type_name), + 'locations': [(line, column)]} + + +def unknown_field(type_name, field_name, line, column, message=None): + return { + 'message': unknown_field_message(type_name, field_name, message), + 'locations': [(line, column)]} + + +def describe_validate_values_of_correct_type(): + + def describe_valid_values(): + + def good_int_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + intArgField(intArg: 2) + } + } + """) + + def good_negative_int_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + intArgField(intArg: -2) + } + } + """) + + def good_boolean_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + booleanArgField(intArg: true) + } + } + """) + + def good_string_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringArgField(intArg: "foo") + } + } + """) + + def good_float_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + floatArgField(intArg: 1.1) + } + } + """) + + def good_negative_float_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + floatArgField(intArg: -1.1) + } + } + """) + + def int_into_id(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + idArgField(idArg: 1) + } + } + """) + + def string_into_id(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + idArgField(idArg: "someIdString") + } + } + """) + + def good_enum_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + dog { + 
doesKnowCommand(dogCommand: SIT) + } + } + """) + + def enum_with_undefined_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + enumArgField(enumArg: UNKNOWN) + } + } + """) + + def enum_with_null_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + enumArgField(enumArg: NO_FUR) + } + } + """) + + def null_into_nullable_type(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + intArgField(intArg: null) + } + } + """) + + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + dog(a: null, b: null, c:{ requiredField: true, intField: null }) { + name + } + } + """) # noqa + + def describe_invalid_string_values(): + + def int_into_string(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringArgField(stringArg: 1) + } + } + """, [ + bad_value('String', '1', 4, 47) + ]) + + def float_into_string(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringArgField(stringArg: 1.0) + } + } + """, [ + bad_value('String', '1.0', 4, 47) + ]) + + def boolean_into_string(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringArgField(stringArg: true) + } + } + """, [ + bad_value('String', 'true', 4, 47) + ]) + + def unquoted_string_into_string(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringArgField(stringArg: BAR) + } + } + """, [ + bad_value('String', 'BAR', 4, 47) + ]) + + def describe_invalid_int_values(): + + def string_into_int(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + intArgField(intArg: "3") + } + } + """, [ + bad_value('Int', '"3"', 4, 41) + ]) + + def big_int_into_int(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + intArgField(intArg: 829384293849283498239482938) + } + } + """, [ + bad_value('Int', '829384293849283498239482938', 4, 41) + ]) + + def unquoted_string_into_int(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + intArgField(intArg: FOO) + } + } + """, [ + bad_value('Int', 'FOO', 4, 41) + ]) + + def simple_float_into_int(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + intArgField(intArg: 3.0) + } + } + """, [ + bad_value('Int', '3.0', 4, 41) + ]) + + def float_into_int(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + intArgField(intArg: 3.333) + } + } + """, [ + bad_value('Int', '3.333', 4, 41) + ]) + + def describe_invalid_float_values(): + + def string_into_float(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + floatArgField(floatArg: "3.333") + } + } + """, [ + bad_value('Float', '"3.333"', 4, 45) + ]) + + def boolean_into_float(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + floatArgField(floatArg: true) + } + } + """, [ + bad_value('Float', 'true', 4, 45) + ]) + + def unquoted_into_float(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + floatArgField(floatArg: FOO) + } + } + """, [ + bad_value('Float', 'FOO', 4, 45) + ]) + + def describe_invalid_boolean_value(): + + def int_into_boolean(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + booleanArgField(booleanArg: 2) + } + } + """, [ + bad_value('Boolean', '2', 4, 49) + ]) + + def float_into_boolean(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + booleanArgField(booleanArg: 1.0) + } + } + """, [ + bad_value('Boolean', '1.0', 4, 49) + 
]) + + def string_into_boolean(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + booleanArgField(booleanArg: "true") + } + } + """, [ + bad_value('Boolean', '"true"', 4, 49) + ]) + + def unquoted_into_boolean(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + booleanArgField(booleanArg: TRUE) + } + } + """, [ + bad_value('Boolean', 'TRUE', 4, 49) + ]) + + def describe_invalid_id_value(): + + def float_into_id(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + idArgField(idArg: 1.0) + } + } + """, [ + bad_value('ID', '1.0', 4, 39) + ]) + + def boolean_into_id(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + idArgField(idArg: true) + } + } + """, [ + bad_value('ID', 'true', 4, 39) + ]) + + def unquoted_into_id(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + idArgField(idArg: SOMETHING) + } + } + """, [ + bad_value('ID', 'SOMETHING', 4, 39) + ]) + + def describe_invalid_enum_value(): + + def int_into_enum(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + dog { + doesKnowCommand(dogCommand: 2) + } + } + """, [ + bad_value('DogCommand', '2', 4, 49) + ]) + + def float_into_enum(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + dog { + doesKnowCommand(dogCommand: 1.0) + } + } + """, [ + bad_value('DogCommand', '1.0', 4, 49) + ]) + + def string_into_enum(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + dog { + doesKnowCommand(dogCommand: "SIT") + } + } + """, [ + bad_value('DogCommand', '"SIT"', 4, 49, + 'Did you mean the enum value SIT?') + ]) + + def boolean_into_enum(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + dog { + doesKnowCommand(dogCommand: true) + } + } + """, [ + bad_value('DogCommand', 'true', 4, 49) + ]) + + def unknown_enum_value_into_enum(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + dog { + doesKnowCommand(dogCommand: JUGGLE) + } + } + """, [ + bad_value('DogCommand', 'JUGGLE', 4, 49) + ]) + + def different_case_enum_value_into_enum(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + dog { + doesKnowCommand(dogCommand: sit) + } + } + """, [ + bad_value('DogCommand', 'sit', 4, 49, + 'Did you mean the enum value SIT?') + ]) + + def describe_valid_list_value(): + + def good_list_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringListArgField(stringListArg: ["one", null, "two"]) + } + } + """) + + def empty_list_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringListArgField(stringListArg: []) + } + } + """) + + def null_value(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringListArgField(stringListArg: null) + } + } + """) + + def single_value_into_list(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringListArgField(stringListArg: "one") + } + } + """) + + def describe_invalid_list_value(): + + def incorrect_item_type(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringListArgField(stringListArg: ["one", 2]) + } + } + """, [ + bad_value('String', '2', 4, 63) + ]) + + def single_value_of_incorrect_type(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + stringListArgField(stringListArg: 1) + } + } + """, [ + bad_value('[String]', '1', 4, 55) + ]) + + def describe_valid_non_nullable_value(): + + def arg_on_optional_arg(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + dog { + 
isHousetrained(atOtherHomes: true) + } + } + """) + + def no_arg_on_optional_arg(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + dog { + isHousetrained + } + } + """) + + def multiple_args(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleReqs(req1: 1, req2: 2) + } + } + """) + + def multiple_args_reverse_order(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleReqs(req2: 2, req1: 1) + } + } + """) + + def no_args_on_multiple_optional(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleOpts + } + } + """) + + def one_arg_on_multiple_optional(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleOpts(opt1: 1) + } + } + """) + + def second_arg_on_multiple_optional(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleOpts(opt2: 1) + } + } + """) + + def multiple_reqs_on_mixed_list(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4) + } + } + """) + + def multiple_reqs_and_one_opt_on_mixed_list(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4, opt1: 5) + } + } + """) + + def all_reqs_and_opts_on_mixed_list(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6) + } + } + """) + + def describe_invalid_non_nullable_value(): + + def incorrect_value_type(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleReqs(req2: "two", req1: "one") + } + } + """, [ + bad_value('Int!', '"two"', 4, 40), + bad_value('Int!', '"one"', 4, 53), + ]) + + def incorrect_value_and_missing_argument_provided_required_arguments(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleReqs(req1: "one") + } + } + """, [ + bad_value('Int!', '"one"', 4, 40), + ]) + + def null_value(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + multipleReqs(req1: null) + } + } + """, [ + bad_value('Int!', 'null', 4, 40), + ]) + + def describe_valid_input_object_value(): + + def optional_arg_despite_required_field_in_type(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField + } + } + """) + + def partial_object_only_required(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { requiredField: true }) + } + } + """) + + def partial_object_required_field_can_be_falsey(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { requiredField: false }) + } + } + """) + + def partial_object_including_required(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { requiredField: true, intField: 4 }) + } + } + """) # noqa + + def full_object(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { + requiredField: true, + intField: 4, + stringField: "foo", + booleanField: false, + stringListField: ["one", "two"] + }) + } + } + """) + + def full_object_with_fields_in_different_order(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { + stringListField: ["one", "two"], + booleanField: false, + requiredField: true, + stringField: "foo", + intField: 4, + }) + } + } + """) + 
+ def describe_invalid_input_object_value(): + + def partial_object_missing_required(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { intField: 4 }) + } + } + """, [ + required_field( + 'ComplexInput', 'requiredField', 'Boolean!', 4, 49), + ]) + + def partial_object_invalid_field_type(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { + stringListField: ["one", 2], + requiredField: true, + }) + } + } + """, [ + bad_value('String', '2', 5, 48), + ]) + + def partial_object_null_to_non_null_field(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { + requiredField: true, + nonNullField: null, + }) + } + } + """, [ + bad_value('Boolean!', 'null', 6, 37), + ]) + + def partial_object_unknown_field_arg(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + complicatedArgs { + complexArgField(complexArg: { + requiredField: true, + unknownField: "value" + }) + } + } + """, [ + unknown_field( + 'ComplexInput', 'unknownField', 6, 23, + 'Did you mean nonNullField, intField or booleanField?') + ]) + + def reports_original_error_for_custom_scalar_which_throws(): + errors = expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + invalidArg(arg: 123) + } + """, [ + bad_value('Invalid', '123', 3, 35, + 'Invalid scalar is always invalid: 123') + ]) + assert str(errors[0].original_error) == ( + 'Invalid scalar is always invalid: 123') + + def allows_custom_scalar_to_accept_complex_literals(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + test1: anyArg(arg: 123) + test2: anyArg(arg: "abc") + test3: anyArg(arg: [123, "abc"]) + test4: anyArg(arg: {deep: [123, "abc"]}) + } + """) + + def describe_directive_arguments(): + + def with_directives_of_valid_types(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + { + dog @include(if: true) { + name + } + human @skip(if: false) { + name + } + } + """) + + def with_directives_with_incorrect_types(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + { + dog @include(if: "yes") { + name @skip(if: ENUM) + } + } + """, [ + bad_value('Boolean!', '"yes"', 3, 36), + bad_value('Boolean!', 'ENUM', 4, 36), + ]) + + def describe_variable_default_values(): + + def variables_with_valid_default_values(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + query WithDefaultValues( + $a: Int = 1, + $b: String = "ok", + $c: ComplexInput = { requiredField: true, intField: 3 } + $d: Int! = 123 + ) { + dog { name } + } + """) + + def variables_with_valid_default_null_values(): + expect_passes_rule(ValuesOfCorrectTypeRule, """ + query WithDefaultValues( + $a: Int = null, + $b: String = null, + $c: ComplexInput = { requiredField: true, intField: null } + ) { + dog { name } + } + """) + + def variables_with_invalid_default_null_values(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + query WithDefaultValues( + $a: Int! = null, + $b: String! 
= null, + $c: ComplexInput = { requiredField: null, intField: null } + ) { + dog { name } + } + """, [ + bad_value('Int!', 'null', 3, 30), + bad_value('String!', 'null', 4, 33), + bad_value('Boolean!', 'null', 5, 55), + ]) + + def variables_with_invalid_default_values(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + query InvalidDefaultValues( + $a: Int = "one", + $b: String = 4, + $c: ComplexInput = "notverycomplex" + ) { + dog { name } + } + """, [ + bad_value('Int', '"one"', 3, 29), + bad_value('String', '4', 4, 32), + bad_value('ComplexInput', '"notverycomplex"', 5, 38), + ]) + + def variables_with_complex_invalid_default_values(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + query WithDefaultValues( + $a: ComplexInput = { requiredField: 123, intField: "abc" } + ) { + dog { name } + } + """, [ + bad_value('Boolean!', '123', 3, 55), + bad_value('Int', '"abc"', 3, 70), + ]) + + def complex_variables_missing_required_fields(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + query MissingRequiredField($a: ComplexInput = {intField: 3}) { + dog { name } + } + """, [ + required_field( + 'ComplexInput', 'requiredField', 'Boolean!', 2, 63) + ]) + + def list_variables_with_invalid_item(): + expect_fails_rule(ValuesOfCorrectTypeRule, """ + query InvalidItem($a: [String] = ["one", 2]) { + dog { name } + } + """, [ + bad_value('String', '2', 2, 58) + ]) diff --git a/tests/validation/test_variables_are_input_types.py b/tests/validation/test_variables_are_input_types.py new file mode 100644 index 00000000..cff23234 --- /dev/null +++ b/tests/validation/test_variables_are_input_types.py @@ -0,0 +1,31 @@ +from graphql.validation import VariablesAreInputTypesRule +from graphql.validation.rules.variables_are_input_types import ( + non_input_type_on_var_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def describe_validate_variables_are_input_types(): + + def input_types_are_valid(): + expect_passes_rule(VariablesAreInputTypesRule, """ + query Foo($a: String, $b: [Boolean!]!, $c: ComplexInput) { + field(a: $a, b: $b, c: $c) + } + """) + + def output_types_are_invalid(): + expect_fails_rule(VariablesAreInputTypesRule, """ + query Foo($a: Dog, $b: [[CatOrDog!]]!, $c: Pet) { + field(a: $a, b: $b, c: $c) + } + """, [{ + 'locations': [(2, 27)], + 'message': non_input_type_on_var_message('a', 'Dog') + }, { + 'locations': [(2, 36)], + 'message': non_input_type_on_var_message('b', '[[CatOrDog!]]!') + }, { + 'locations': [(2, 56)], + 'message': non_input_type_on_var_message('c', 'Pet') + }]) diff --git a/tests/validation/test_variables_in_allowed_position.py b/tests/validation/test_variables_in_allowed_position.py new file mode 100644 index 00000000..2807240e --- /dev/null +++ b/tests/validation/test_variables_in_allowed_position.py @@ -0,0 +1,280 @@ +from graphql.validation import VariablesInAllowedPositionRule +from graphql.validation.rules.variables_in_allowed_position import ( + bad_var_pos_message) + +from .harness import expect_fails_rule, expect_passes_rule + + +def describe_validate_variables_are_in_allowed_positions(): + + def boolean_to_boolean(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($booleanArg: Boolean) + { + complicatedArgs { + booleanArgField(booleanArg: $booleanArg) + } + } + """) + + def boolean_to_boolean_in_fragment(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + fragment booleanArgFrag on ComplicatedArgs { + booleanArgField(booleanArg: $booleanArg) + } + query Query($booleanArg: Boolean) + { + complicatedArgs 
{ + ...booleanArgFrag + } + } + """) + + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($booleanArg: Boolean) + { + complicatedArgs { + ...booleanArgFrag + } + } + fragment booleanArgFrag on ComplicatedArgs { + booleanArgField(booleanArg: $booleanArg) + } + """) + + def non_null_boolean_to_boolean(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($nonNullBooleanArg: Boolean!) + { + complicatedArgs { + booleanArgField(booleanArg: $nonNullBooleanArg) + } + } + """) + + def non_null_boolean_to_boolean_within_fragment(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + fragment booleanArgFrag on ComplicatedArgs { + booleanArgField(booleanArg: $nonNullBooleanArg) + } + + query Query($nonNullBooleanArg: Boolean!) + { + complicatedArgs { + ...booleanArgFrag + } + } + """) + + def array_of_string_to_array_of_string(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($stringListVar: [String]) + { + complicatedArgs { + stringListArgField(stringListArg: $stringListVar) + } + } + """) + + def array_of_non_null_string_to_array_of_string(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($stringListVar: [String!]) + { + complicatedArgs { + stringListArgField(stringListArg: $stringListVar) + } + } + """) + + def string_to_array_of_string_in_item_position(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($stringVar: String) + { + complicatedArgs { + stringListArgField(stringListArg: [$stringVar]) + } + } + """) + + def non_null_string_to_array_of_string_in_item_position(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($stringVar: String!) + { + complicatedArgs { + stringListArgField(stringListArg: [$stringVar]) + } + } + """) + + def complex_input_to_complex_input(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($complexVar: ComplexInput) + { + complicatedArgs { + complexArgField(complexArg: $complexVar) + } + } + """) + + def complex_input_to_complex_input_in_field_position(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($boolVar: Boolean = false) + { + complicatedArgs { + complexArgField(complexArg: {requiredArg: $boolVar}) + } + } + """) + + def non_null_boolean_to_non_null_boolean_in_directive(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($boolVar: Boolean!) 
+ { + dog @include(if: $boolVar) + } + """) + + def int_to_non_null_int(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + query Query($intArg: Int) { + complicatedArgs { + nonNullIntArgField(nonNullIntArg: $intArg) + } + } + """, [{ + 'message': bad_var_pos_message('intArg', 'Int', 'Int!'), + 'locations': [(2, 25), (4, 51)] + }]) + + def int_to_non_null_int_within_fragment(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + fragment nonNullIntArgFieldFrag on ComplicatedArgs { + nonNullIntArgField(nonNullIntArg: $intArg) + } + + query Query($intArg: Int) { + complicatedArgs { + ...nonNullIntArgFieldFrag + } + } + """, [{ + 'message': bad_var_pos_message('intArg', 'Int', 'Int!'), + 'locations': [(6, 25), (3, 49)] + }]) + + def int_to_non_null_int_within_nested_fragment(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + fragment outerFrag on ComplicatedArgs { + ...nonNullIntArgFieldFrag + } + + fragment nonNullIntArgFieldFrag on ComplicatedArgs { + nonNullIntArgField(nonNullIntArg: $intArg) + } + + query Query($intArg: Int) { + complicatedArgs { + ...outerFrag + } + } + """, [{ + 'message': bad_var_pos_message('intArg', 'Int', 'Int!'), + 'locations': [(10, 25), (7, 49)] + }]) + + def string_to_boolean(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + query Query($stringVar: String) { + complicatedArgs { + booleanArgField(booleanArg: $stringVar) + } + } + """, [{ + 'message': bad_var_pos_message('stringVar', 'String', 'Boolean'), + 'locations': [(2, 25), (4, 45)] + }]) + + def string_to_array_of_string(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + query Query($stringVar: String) { + complicatedArgs { + stringListArgField(stringListArg: $stringVar) + } + } + """, [{ + 'message': bad_var_pos_message('stringVar', 'String', '[String]'), + 'locations': [(2, 25), (4, 51)] + }]) + + def boolean_to_non_null_boolean_in_directive(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + query Query($boolVar: Boolean) { + dog @include(if: $boolVar) + } + """, [{ + 'message': bad_var_pos_message('boolVar', 'Boolean', 'Boolean!'), + 'locations': [(2, 25), (3, 32)] + }]) + + def string_to_non_null_boolean_in_directive(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + query Query($stringVar: String) { + dog @include(if: $stringVar) + } + """, [{ + 'message': bad_var_pos_message('stringVar', 'String', 'Boolean!'), + 'locations': [(2, 25), (3, 32)] + }]) + + def array_of_string_to_array_of_non_null_string(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + query Query($stringListVar: [String]) + { + complicatedArgs { + stringListNonNullArgField(stringListNonNullArg: $stringListVar) + } + } + """, [{ + 'message': bad_var_pos_message( + 'stringListVar', '[String]', '[String!]'), + 'locations': [(2, 25), (5, 65)] + }]) + + def describe_allows_optional_nullable_variables_with_default_values(): + + def int_to_non_null_int_fails_when_var_provides_null_default_value(): + expect_fails_rule(VariablesInAllowedPositionRule, """ + query Query($intVar: Int = null) { + complicatedArgs { + nonNullIntArgField(nonNullIntArg: $intVar) + } + } + """, [{ + 'message': bad_var_pos_message('intVar', 'Int', 'Int!'), + 'locations': [(2, 29), (4, 55)] + }]) + + def int_to_non_null_int_when_var_provides_non_null_default_value(): + expect_passes_rule(VariablesInAllowedPositionRule, """ + query Query($intVar: Int = 1) { + complicatedArgs { + nonNullIntArgField(nonNullIntArg: $intVar) + } + } + """) + + def 
int_to_non_null_int_when_optional_arg_provides_default_value():
+            expect_passes_rule(VariablesInAllowedPositionRule, """
+              query Query($intVar: Int) {
+                complicatedArgs {
+                  nonNullFieldWithDefault(nonNullIntArg: $intVar)
+                }
+              }
+            """)
+
+        def bool_to_non_null_bool_in_directive_with_default_value_with_option():
+            expect_passes_rule(VariablesInAllowedPositionRule, """
+              query Query($boolVar: Boolean = false) {
+                dog @include(if: $boolVar)
+              }
+            """)
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 00000000..b0d08c4c
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,30 @@
+[tox]
+envlist = py{36,37}, flake8, mypy
+
+[travis]
+python =
+    3.7: py37
+    3.6: py36
+
+[testenv:flake8]
+basepython = python
+deps = flake8
+commands =
+    flake8 graphql tests
+
+[testenv:mypy]
+basepython = python
+deps = mypy
+commands =
+    mypy graphql
+
+[testenv]
+setenv =
+    PYTHONPATH = {toxinidir}
+deps =
+    pytest
+    pytest-asyncio
+    pytest-describe
+commands =
+    python -m pip install -U pip
+    pytest
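With this tox.ini in place, running `tox` exercises the py36/py37 test environments plus the flake8 and mypy checks, and a single environment can be selected with e.g. `tox -e py37`; the nested describe_*/test-function style used throughout these modules relies on the pytest-describe plugin listed in deps. The expect_passes_rule and expect_fails_rule helpers come from tests/validation/harness.py, which is not part of this hunk; the following is only a rough, hypothetical sketch of what such helpers do, built on the validate() API added by this patch (the real harness constructs a much larger schema with types such as Dog, ComplicatedArgs and ComplexInput):

# Hypothetical sketch of the validation test harness; the actual
# tests/validation/harness.py in this patch differs in detail.
from graphql import build_schema, parse
from graphql.validation import validate

test_schema = build_schema("""
    type Query {
        field(a: String): String
    }
""")  # stand-in for the much richer harness schema


def expect_passes_rule(rule, query_string):
    errors = validate(test_schema, parse(query_string), [rule])
    assert errors == [], f"expected no errors, got {errors}"


def expect_fails_rule(rule, query_string, expected_errors):
    errors = validate(test_schema, parse(query_string), [rule])
    # Compare against dicts of the {'message': ..., 'locations': [(line, col)]}
    # form used in the tests above (helpers such as bad_value build that shape).
    found = [
        {
            "message": error.message,
            "locations": [(loc.line, loc.column) for loc in error.locations],
        }
        for error in errors
    ]
    assert found == expected_errors
    return errors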