Diffstat (limited to 'vendor/github.com/vektah/gqlparser/validator')
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/error.go | 55
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/imported/LICENSE | 33
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/messaging.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/prelude.go | 9
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/prelude.graphql | 119
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go | 86
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go | 57
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go | 31
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go | 19
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go | 61
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go | 19
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go | 93
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go | 28
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go | 557
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go | 68
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go | 63
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go | 36
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go | 33
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go | 24
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go | 22
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go | 27
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go | 22
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go | 23
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go | 130
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go | 28
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go | 36
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/schema.go | 276
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/schema_test.yml | 323
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/suggestionList.go | 69
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/validator.go | 44
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/vars.go | 199
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/walk.go | 286
36 files changed, 0 insertions, 3044 deletions
diff --git a/vendor/github.com/vektah/gqlparser/validator/error.go b/vendor/github.com/vektah/gqlparser/validator/error.go
deleted file mode 100644
index f354dee5..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/error.go
+++ /dev/null
@@ -1,55 +0,0 @@
-package validator
-
-import (
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-type ErrorOption func(err *gqlerror.Error)
-
-func Message(msg string, args ...interface{}) ErrorOption {
- return func(err *gqlerror.Error) {
- err.Message += fmt.Sprintf(msg, args...)
- }
-}
-
-func At(position *ast.Position) ErrorOption {
- return func(err *gqlerror.Error) {
- if position == nil {
- return
- }
- err.Locations = append(err.Locations, gqlerror.Location{
- Line: position.Line,
- Column: position.Column,
- })
- if position.Src.Name != "" {
- err.SetFile(position.Src.Name)
- }
- }
-}
-
-func SuggestListQuoted(prefix string, typed string, suggestions []string) ErrorOption {
- suggested := SuggestionList(typed, suggestions)
- return func(err *gqlerror.Error) {
- if len(suggested) > 0 {
- err.Message += " " + prefix + " " + QuotedOrList(suggested...) + "?"
- }
- }
-}
-
-func SuggestListUnquoted(prefix string, typed string, suggestions []string) ErrorOption {
- suggested := SuggestionList(typed, suggestions)
- return func(err *gqlerror.Error) {
- if len(suggested) > 0 {
- err.Message += " " + prefix + " " + OrList(suggested...) + "?"
- }
- }
-}
-
-func Suggestf(suggestion string, args ...interface{}) ErrorOption {
- return func(err *gqlerror.Error) {
- err.Message += " Did you mean " + fmt.Sprintf(suggestion, args...) + "?"
- }
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE b/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE
deleted file mode 100644
index fce4519e..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE
+++ /dev/null
@@ -1,33 +0,0 @@
-The files in this testdata directory are derived from the graphql-js project:
-https://github.com/graphql/graphql-js
-
-BSD License
-
-For GraphQL software
-
-Copyright (c) 2015, Facebook, Inc. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
-
- * Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-
- * Neither the name Facebook nor the names of its contributors may be used to
- endorse or promote products derived from this software without specific
- prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file
diff --git a/vendor/github.com/vektah/gqlparser/validator/messaging.go b/vendor/github.com/vektah/gqlparser/validator/messaging.go
deleted file mode 100644
index f1ab5873..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/messaging.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package validator
-
-import "bytes"
-
-// Given [ A, B, C ] return '"A", "B", or "C"'.
-func QuotedOrList(items ...string) string {
- itemsQuoted := make([]string, len(items))
- for i, item := range items {
- itemsQuoted[i] = `"` + item + `"`
- }
- return OrList(itemsQuoted...)
-}
-
-// Given [ A, B, C ] return 'A, B, or C'.
-func OrList(items ...string) string {
- var buf bytes.Buffer
-
- if len(items) > 5 {
- items = items[:5]
- }
- if len(items) == 2 {
- buf.WriteString(items[0])
- buf.WriteString(" or ")
- buf.WriteString(items[1])
- return buf.String()
- }
-
- for i, item := range items {
- if i != 0 {
- if i == len(items)-1 {
- buf.WriteString(", or ")
- } else {
- buf.WriteString(", ")
- }
- }
- buf.WriteString(item)
- }
- return buf.String()
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/prelude.go b/vendor/github.com/vektah/gqlparser/validator/prelude.go
deleted file mode 100644
index c7a4d35b..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/prelude.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package validator
-
-import "github.com/vektah/gqlparser/ast"
-
-var Prelude = &ast.Source{
- Name: "prelude.graphql",
- Input: "# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema\n\n# The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.\nscalar Int\n\n# The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).\nscalar Float\n\n# The `String`scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.\nscalar String\n\n# The `Boolean` scalar type represents ` + \"`\" + `true` + \"`\" + ` or ` + \"`\" + `false` + \"`\" + `.\nscalar Boolean\n\n# The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as \"4\") or integer (such as 4) input value will be accepted as an ID.\nscalar ID\n\n# The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument.\ndirective @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n# The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument.\ndirective @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n# The @deprecated directive is used within the type system definition language to indicate deprecated portions of a GraphQL service’s schema, such as deprecated fields on a type or deprecated enum values.\ndirective @deprecated(reason: String = \"No longer supported\") on FIELD_DEFINITION | ENUM_VALUE\n\ntype __Schema {\n types: [__Type!]!\n queryType: __Type!\n mutationType: __Type\n subscriptionType: __Type\n directives: [__Directive!]!\n}\n\ntype __Type {\n kind: __TypeKind!\n name: String\n description: String\n\n # OBJECT and INTERFACE only\n fields(includeDeprecated: Boolean = false): [__Field!]\n\n # OBJECT only\n interfaces: [__Type!]\n\n # INTERFACE and UNION only\n possibleTypes: [__Type!]\n\n # ENUM only\n enumValues(includeDeprecated: Boolean = false): [__EnumValue!]\n\n # INPUT_OBJECT only\n inputFields: [__InputValue!]\n\n # NON_NULL and LIST only\n ofType: __Type\n}\n\ntype __Field {\n name: String!\n description: String\n args: [__InputValue!]!\n type: __Type!\n isDeprecated: Boolean!\n deprecationReason: String\n}\n\ntype __InputValue {\n name: String!\n description: String\n type: __Type!\n defaultValue: String\n}\n\ntype __EnumValue {\n name: String!\n description: String\n isDeprecated: Boolean!\n deprecationReason: String\n}\n\nenum __TypeKind {\n SCALAR\n OBJECT\n INTERFACE\n UNION\n ENUM\n INPUT_OBJECT\n LIST\n NON_NULL\n}\n\ntype __Directive {\n name: String!\n description: String\n locations: [__DirectiveLocation!]!\n args: [__InputValue!]!\n}\n\nenum __DirectiveLocation {\n QUERY\n MUTATION\n SUBSCRIPTION\n FIELD\n FRAGMENT_DEFINITION\n FRAGMENT_SPREAD\n INLINE_FRAGMENT\n SCHEMA\n SCALAR\n OBJECT\n FIELD_DEFINITION\n ARGUMENT_DEFINITION\n INTERFACE\n UNION\n ENUM\n ENUM_VALUE\n INPUT_OBJECT\n INPUT_FIELD_DEFINITION\n}\n",
- BuiltIn: true,
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/prelude.graphql b/vendor/github.com/vektah/gqlparser/validator/prelude.graphql
deleted file mode 100644
index 2c7f7c02..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/prelude.graphql
+++ /dev/null
@@ -1,119 +0,0 @@
-# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema
-
-# The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
-scalar Int
-
-# The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
-scalar Float
-
-# The `String`scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
-scalar String
-
-# The `Boolean` scalar type represents ` + "`" + `true` + "`" + ` or ` + "`" + `false` + "`" + `.
-scalar Boolean
-
-# The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as "4") or integer (such as 4) input value will be accepted as an ID.
-scalar ID
-
-# The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument.
-directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
-
-# The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument.
-directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
-
-# The @deprecated directive is used within the type system definition language to indicate deprecated portions of a GraphQL service’s schema, such as deprecated fields on a type or deprecated enum values.
-directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ENUM_VALUE
-
-type __Schema {
- types: [__Type!]!
- queryType: __Type!
- mutationType: __Type
- subscriptionType: __Type
- directives: [__Directive!]!
-}
-
-type __Type {
- kind: __TypeKind!
- name: String
- description: String
-
- # OBJECT and INTERFACE only
- fields(includeDeprecated: Boolean = false): [__Field!]
-
- # OBJECT only
- interfaces: [__Type!]
-
- # INTERFACE and UNION only
- possibleTypes: [__Type!]
-
- # ENUM only
- enumValues(includeDeprecated: Boolean = false): [__EnumValue!]
-
- # INPUT_OBJECT only
- inputFields: [__InputValue!]
-
- # NON_NULL and LIST only
- ofType: __Type
-}
-
-type __Field {
- name: String!
- description: String
- args: [__InputValue!]!
- type: __Type!
- isDeprecated: Boolean!
- deprecationReason: String
-}
-
-type __InputValue {
- name: String!
- description: String
- type: __Type!
- defaultValue: String
-}
-
-type __EnumValue {
- name: String!
- description: String
- isDeprecated: Boolean!
- deprecationReason: String
-}
-
-enum __TypeKind {
- SCALAR
- OBJECT
- INTERFACE
- UNION
- ENUM
- INPUT_OBJECT
- LIST
- NON_NULL
-}
-
-type __Directive {
- name: String!
- description: String
- locations: [__DirectiveLocation!]!
- args: [__InputValue!]!
-}
-
-enum __DirectiveLocation {
- QUERY
- MUTATION
- SUBSCRIPTION
- FIELD
- FRAGMENT_DEFINITION
- FRAGMENT_SPREAD
- INLINE_FRAGMENT
- SCHEMA
- SCALAR
- OBJECT
- FIELD_DEFINITION
- ARGUMENT_DEFINITION
- INTERFACE
- UNION
- ENUM
- ENUM_VALUE
- INPUT_OBJECT
- INPUT_FIELD_DEFINITION
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
deleted file mode 100644
index 69148d52..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
+++ /dev/null
@@ -1,86 +0,0 @@
-package validator
-
-import (
- "fmt"
- "sort"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("FieldsOnCorrectType", func(observers *Events, addError AddErrFunc) {
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if field.ObjectDefinition == nil || field.Definition != nil {
- return
- }
-
- message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name)
-
- if suggestedTypeNames := getSuggestedTypeNames(walker, field.ObjectDefinition, field.Name); suggestedTypeNames != nil {
- message += " Did you mean to use an inline fragment on " + QuotedOrList(suggestedTypeNames...) + "?"
- } else if suggestedFieldNames := getSuggestedFieldNames(field.ObjectDefinition, field.Name); suggestedFieldNames != nil {
- message += " Did you mean " + QuotedOrList(suggestedFieldNames...) + "?"
- }
-
- addError(
- Message(message),
- At(field.Position),
- )
- })
- })
-}
-
-// Go through all of the implementations of type, as well as the interfaces
-// that they implement. If any of those types include the provided field,
-// suggest them, sorted by how often the type is referenced, starting
-// with Interfaces.
-func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) []string {
- if !parent.IsAbstractType() {
- return nil
- }
-
- var suggestedObjectTypes []string
- var suggestedInterfaceTypes []string
- interfaceUsageCount := map[string]int{}
-
- for _, possibleType := range walker.Schema.GetPossibleTypes(parent) {
- field := possibleType.Fields.ForName(name)
- if field == nil {
- continue
- }
-
- suggestedObjectTypes = append(suggestedObjectTypes, possibleType.Name)
-
- for _, possibleInterface := range possibleType.Interfaces {
- interfaceField := walker.Schema.Types[possibleInterface]
- if interfaceField != nil && interfaceField.Fields.ForName(name) != nil {
- if interfaceUsageCount[possibleInterface] == 0 {
- suggestedInterfaceTypes = append(suggestedInterfaceTypes, possibleInterface)
- }
- interfaceUsageCount[possibleInterface]++
- }
- }
- }
-
- sort.SliceStable(suggestedInterfaceTypes, func(i, j int) bool {
- return interfaceUsageCount[suggestedInterfaceTypes[i]] > interfaceUsageCount[suggestedInterfaceTypes[j]]
- })
-
- return append(suggestedInterfaceTypes, suggestedObjectTypes...)
-}
-
-// For the field name provided, determine if there are any similar field names
-// that may be the result of a typo.
-func getSuggestedFieldNames(parent *ast.Definition, name string) []string {
- if parent.Kind != ast.Object && parent.Kind != ast.Interface {
- return nil
- }
-
- var possibleFieldNames []string
- for _, field := range parent.Fields {
- possibleFieldNames = append(possibleFieldNames, field.Name)
- }
-
- return SuggestionList(name, possibleFieldNames)
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
deleted file mode 100644
index a4a48246..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package validator
-
-import (
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("FragmentsOnCompositeTypes", func(observers *Events, addError AddErrFunc) {
- observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
- fragmentType := walker.Schema.Types[inlineFragment.TypeCondition]
- if fragmentType == nil || fragmentType.IsCompositeType() {
- return
- }
-
- message := fmt.Sprintf(`Fragment cannot condition on non composite type "%s".`, inlineFragment.TypeCondition)
-
- addError(
- Message(message),
- At(inlineFragment.Position),
- )
- })
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() {
- return
- }
-
- message := fmt.Sprintf(`Fragment "%s" cannot condition on non composite type "%s".`, fragment.Name, fragment.TypeCondition)
-
- addError(
- Message(message),
- At(fragment.Position),
- )
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
deleted file mode 100644
index 1a46431d..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
+++ /dev/null
@@ -1,57 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) {
- // A GraphQL field is only valid if all supplied arguments are defined by that field.
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if field.Definition == nil || field.ObjectDefinition == nil {
- return
- }
- for _, arg := range field.Arguments {
- def := field.Definition.Arguments.ForName(arg.Name)
- if def != nil {
- continue
- }
-
- var suggestions []string
- for _, argDef := range field.Definition.Arguments {
- suggestions = append(suggestions, argDef.Name)
- }
-
- addError(
- Message(`Unknown argument "%s" on field "%s" of type "%s".`, arg.Name, field.Name, field.ObjectDefinition.Name),
- SuggestListQuoted("Did you mean", arg.Name, suggestions),
- At(field.Position),
- )
- }
- })
-
- observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
- if directive.Definition == nil {
- return
- }
- for _, arg := range directive.Arguments {
- def := directive.Definition.Arguments.ForName(arg.Name)
- if def != nil {
- continue
- }
-
- var suggestions []string
- for _, argDef := range directive.Definition.Arguments {
- suggestions = append(suggestions, argDef.Name)
- }
-
- addError(
- Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name),
- SuggestListQuoted("Did you mean", arg.Name, suggestions),
- At(directive.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
deleted file mode 100644
index dc4353ef..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("KnownDirectives", func(observers *Events, addError AddErrFunc) {
- observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
- if directive.Definition == nil {
- addError(
- Message(`Unknown directive "%s".`, directive.Name),
- At(directive.Position),
- )
- return
- }
-
- for _, loc := range directive.Definition.Locations {
- if loc == directive.Location {
- return
- }
- }
-
- addError(
- Message(`Directive "%s" may not be used on %s.`, directive.Name, directive.Location),
- At(directive.Position),
- )
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
deleted file mode 100644
index ec91588c..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) {
- observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
- if fragmentSpread.Definition == nil {
- addError(
- Message(`Unknown fragment "%s".`, fragmentSpread.Name),
- At(fragmentSpread.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
deleted file mode 100644
index 223086b3..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("KnownTypeNames", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- for _, vdef := range operation.VariableDefinitions {
- typeName := vdef.Type.Name()
- def := walker.Schema.Types[typeName]
- if def != nil {
- continue
- }
-
- addError(
- Message(`Unknown type "%s".`, typeName),
- At(operation.Position),
- )
- }
- })
-
- observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
- typedName := inlineFragment.TypeCondition
- if typedName == "" {
- return
- }
-
- def := walker.Schema.Types[typedName]
- if def != nil {
- return
- }
-
- addError(
- Message(`Unknown type "%s".`, typedName),
- At(inlineFragment.Position),
- )
- })
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- typeName := fragment.TypeCondition
- def := walker.Schema.Types[typeName]
- if def != nil {
- return
- }
-
- var possibleTypes []string
- for _, t := range walker.Schema.Types {
- possibleTypes = append(possibleTypes, t.Name)
- }
-
- addError(
- Message(`Unknown type "%s".`, typeName),
- SuggestListQuoted("Did you mean", typeName, possibleTypes),
- At(fragment.Position),
- )
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go b/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
deleted file mode 100644
index dd232142..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("LoneAnonymousOperation", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- if operation.Name == "" && len(walker.Document.Operations) > 1 {
- addError(
- Message(`This anonymous operation must be the only defined operation.`),
- At(operation.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
deleted file mode 100644
index 7511529b..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package validator
-
-import (
- "fmt"
- "strings"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("NoFragmentCycles", func(observers *Events, addError AddErrFunc) {
- visitedFrags := make(map[string]bool)
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- var spreadPath []*ast.FragmentSpread
- spreadPathIndexByName := make(map[string]int)
-
- var recursive func(fragment *ast.FragmentDefinition)
- recursive = func(fragment *ast.FragmentDefinition) {
- if visitedFrags[fragment.Name] {
- return
- }
-
- visitedFrags[fragment.Name] = true
-
- spreadNodes := getFragmentSpreads(fragment.SelectionSet)
- if len(spreadNodes) == 0 {
- return
- }
- spreadPathIndexByName[fragment.Name] = len(spreadPath)
-
- for _, spreadNode := range spreadNodes {
- spreadName := spreadNode.Name
-
- cycleIndex, ok := spreadPathIndexByName[spreadName]
-
- spreadPath = append(spreadPath, spreadNode)
- if !ok {
- spreadFragment := walker.Document.Fragments.ForName(spreadName)
- if spreadFragment != nil {
- recursive(spreadFragment)
- }
- } else {
- cyclePath := spreadPath[cycleIndex : len(spreadPath)-1]
- var fragmentNames []string
- for _, fs := range cyclePath {
- fragmentNames = append(fragmentNames, fs.Name)
- }
- var via string
- if len(fragmentNames) != 0 {
- via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", "))
- }
- addError(
- Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via),
- At(spreadNode.Position),
- )
- }
-
- spreadPath = spreadPath[:len(spreadPath)-1]
- }
-
- delete(spreadPathIndexByName, fragment.Name)
- }
-
- recursive(fragment)
- })
- })
-}
-
-func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread {
- var spreads []*ast.FragmentSpread
-
- setsToVisit := []ast.SelectionSet{node}
-
- for len(setsToVisit) != 0 {
- set := setsToVisit[len(setsToVisit)-1]
- setsToVisit = setsToVisit[:len(setsToVisit)-1]
-
- for _, selection := range set {
- switch selection := selection.(type) {
- case *ast.FragmentSpread:
- spreads = append(spreads, selection)
- case *ast.Field:
- setsToVisit = append(setsToVisit, selection.SelectionSet)
- case *ast.InlineFragment:
- setsToVisit = append(setsToVisit, selection.SelectionSet)
- }
- }
- }
-
- return spreads
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
deleted file mode 100644
index 505206be..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("NoUndefinedVariables", func(observers *Events, addError AddErrFunc) {
- observers.OnValue(func(walker *Walker, value *ast.Value) {
- if walker.CurrentOperation == nil || value.Kind != ast.Variable || value.VariableDefinition != nil {
- return
- }
-
- if walker.CurrentOperation.Name != "" {
- addError(
- Message(`Variable "%s" is not defined by operation "%s".`, value, walker.CurrentOperation.Name),
- At(walker.CurrentOperation.Position),
- )
- } else {
- addError(
- Message(`Variable "%s" is not defined.`, value),
- At(value.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
deleted file mode 100644
index 4aa835f5..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) {
-
- inFragmentDefinition := false
- fragmentNameUsed := make(map[string]bool)
-
- observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
- if !inFragmentDefinition {
- fragmentNameUsed[fragmentSpread.Name] = true
- }
- })
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- inFragmentDefinition = true
- if !fragmentNameUsed[fragment.Name] {
- addError(
- Message(`Fragment "%s" is never used.`, fragment.Name),
- At(fragment.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
deleted file mode 100644
index 28cf7736..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- for _, varDef := range operation.VariableDefinitions {
- if varDef.Used {
- continue
- }
-
- if operation.Name != "" {
- addError(
- Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name),
- At(varDef.Position),
- )
- } else {
- addError(
- Message(`Variable "$%s" is never used.`, varDef.Variable),
- At(varDef.Position),
- )
- }
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go b/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
deleted file mode 100644
index bb2f1831..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
+++ /dev/null
@@ -1,557 +0,0 @@
-package validator
-
-import (
- "bytes"
- "fmt"
- "reflect"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
-
- AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) {
- /**
- * Algorithm:
- *
- * Conflicts occur when two fields exist in a query which will produce the same
- * response name, but represent differing values, thus creating a conflict.
- * The algorithm below finds all conflicts via making a series of comparisons
- * between fields. In order to compare as few fields as possible, this makes
- * a series of comparisons "within" sets of fields and "between" sets of fields.
- *
- * Given any selection set, a collection produces both a set of fields by
- * also including all inline fragments, as well as a list of fragments
- * referenced by fragment spreads.
- *
- * A) Each selection set represented in the document first compares "within" its
- * collected set of fields, finding any conflicts between every pair of
- * overlapping fields.
- * Note: This is the *only time* that the fields "within" a set are compared
- * to each other. After this only fields "between" sets are compared.
- *
- * B) Also, if any fragment is referenced in a selection set, then a
- * comparison is made "between" the original set of fields and the
- * referenced fragment.
- *
- * C) Also, if multiple fragments are referenced, then comparisons
- * are made "between" each referenced fragment.
- *
- * D) When comparing "between" a set of fields and a referenced fragment, first
- * a comparison is made between each field in the original set of fields and
- * each field in the referenced set of fields.
- *
- * E) Also, if any fragment is referenced in the referenced selection set,
- * then a comparison is made "between" the original set of fields and the
- * referenced fragment (recursively referring to step D).
- *
- * F) When comparing "between" two fragments, first a comparison is made between
- * each field in the first referenced set of fields and each field in the
- * second referenced set of fields.
- *
- * G) Also, any fragments referenced by the first must be compared to the
- * second, and any fragments referenced by the second must be compared to the
- * first (recursively referring to step F).
- *
- * H) When comparing two fields, if both have selection sets, then a comparison
- * is made "between" both selection sets, first comparing the set of fields in
- * the first selection set with the set of fields in the second.
- *
- * I) Also, if any fragment is referenced in either selection set, then a
- * comparison is made "between" the other set of fields and the
- * referenced fragment.
- *
- * J) Also, if two fragments are referenced in both selection sets, then a
- * comparison is made "between" the two fragments.
- *
- */
-
- m := &overlappingFieldsCanBeMergedManager{
- comparedFragmentPairs: pairSet{data: make(map[string]map[string]bool)},
- }
-
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- m.walker = walker
- conflicts := m.findConflictsWithinSelectionSet(operation.SelectionSet)
- for _, conflict := range conflicts {
- conflict.addFieldsConflictMessage(addError)
- }
- })
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if walker.CurrentOperation == nil {
- // When checking both Operation and Fragment, errors are duplicated when processing FragmentDefinition referenced from Operation
- return
- }
- m.walker = walker
- conflicts := m.findConflictsWithinSelectionSet(field.SelectionSet)
- for _, conflict := range conflicts {
- conflict.addFieldsConflictMessage(addError)
- }
- })
- observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
- m.walker = walker
- conflicts := m.findConflictsWithinSelectionSet(inlineFragment.SelectionSet)
- for _, conflict := range conflicts {
- conflict.addFieldsConflictMessage(addError)
- }
- })
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- m.walker = walker
- conflicts := m.findConflictsWithinSelectionSet(fragment.SelectionSet)
- for _, conflict := range conflicts {
- conflict.addFieldsConflictMessage(addError)
- }
- })
- })
-}
-
-type pairSet struct {
- data map[string]map[string]bool
-}
-
-func (pairSet *pairSet) Add(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) {
- add := func(a *ast.FragmentSpread, b *ast.FragmentSpread) {
- m := pairSet.data[a.Name]
- if m == nil {
- m = make(map[string]bool)
- pairSet.data[a.Name] = m
- }
- m[b.Name] = areMutuallyExclusive
- }
- add(a, b)
- add(b, a)
-}
-
-func (pairSet *pairSet) Has(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) bool {
- am, ok := pairSet.data[a.Name]
- if !ok {
- return false
- }
- result, ok := am[b.Name]
- if !ok {
- return false
- }
-
- // areMutuallyExclusive being false is a superset of being true,
- // hence if we want to know if this PairSet "has" these two with no
- // exclusivity, we have to ensure it was added as such.
- if !areMutuallyExclusive {
- return !result
- }
-
- return true
-}
-
-type sequentialFieldsMap struct {
- // We can't use map[string][]*ast.Field because map iteration order is not stable...
- seq []string
- data map[string][]*ast.Field
-}
-
-type fieldIterateEntry struct {
- ResponseName string
- Fields []*ast.Field
-}
-
-func (m *sequentialFieldsMap) Push(responseName string, field *ast.Field) {
- fields, ok := m.data[responseName]
- if !ok {
- m.seq = append(m.seq, responseName)
- }
- fields = append(fields, field)
- m.data[responseName] = fields
-}
-
-func (m *sequentialFieldsMap) Get(responseName string) ([]*ast.Field, bool) {
- fields, ok := m.data[responseName]
- return fields, ok
-}
-
-func (m *sequentialFieldsMap) Iterator() [][]*ast.Field {
- fieldsList := make([][]*ast.Field, 0, len(m.seq))
- for _, responseName := range m.seq {
- fields := m.data[responseName]
- fieldsList = append(fieldsList, fields)
- }
- return fieldsList
-}
-
-func (m *sequentialFieldsMap) KeyValueIterator() []*fieldIterateEntry {
- fieldEntriesList := make([]*fieldIterateEntry, 0, len(m.seq))
- for _, responseName := range m.seq {
- fields := m.data[responseName]
- fieldEntriesList = append(fieldEntriesList, &fieldIterateEntry{
- ResponseName: responseName,
- Fields: fields,
- })
- }
- return fieldEntriesList
-}
-
-type conflictMessageContainer struct {
- Conflicts []*ConflictMessage
-}
-
-type ConflictMessage struct {
- Message string
- ResponseName string
- Names []string
- SubMessage []*ConflictMessage
- Position *ast.Position
-}
-
-func (m *ConflictMessage) String(buf *bytes.Buffer) {
- if len(m.SubMessage) == 0 {
- buf.WriteString(m.Message)
- return
- }
-
- for idx, subMessage := range m.SubMessage {
- buf.WriteString(`subfields "`)
- buf.WriteString(subMessage.ResponseName)
- buf.WriteString(`" conflict because `)
- subMessage.String(buf)
- if idx != len(m.SubMessage)-1 {
- buf.WriteString(" and ")
- }
- }
-}
-
-func (m *ConflictMessage) addFieldsConflictMessage(addError AddErrFunc) {
- var buf bytes.Buffer
- m.String(&buf)
- addError(
- Message(`Fields "%s" conflict because %s. Use different aliases on the fields to fetch both if this was intentional.`, m.ResponseName, buf.String()),
- At(m.Position),
- )
-}
-
-type overlappingFieldsCanBeMergedManager struct {
- walker *Walker
-
- // per walker
- comparedFragmentPairs pairSet
- // cachedFieldsAndFragmentNames interface{}
-
- // per selectionSet
- comparedFragments map[string]bool
-}
-
-func (m *overlappingFieldsCanBeMergedManager) findConflictsWithinSelectionSet(selectionSet ast.SelectionSet) []*ConflictMessage {
- if len(selectionSet) == 0 {
- return nil
- }
-
- fieldsMap, fragmentSpreads := getFieldsAndFragmentNames(selectionSet)
-
- var conflicts conflictMessageContainer
-
- // (A) Find all conflicts "within" the fieldMap of this selection set.
- // Note: this is the *only place* `collectConflictsWithin` is called.
- m.collectConflictsWithin(&conflicts, fieldsMap)
-
- m.comparedFragments = make(map[string]bool)
- for idx, fragmentSpreadA := range fragmentSpreads {
- // (B) Then collect conflicts between this fieldMap and those represented by
- // each spread fragment name found.
- m.collectConflictsBetweenFieldsAndFragment(&conflicts, false, fieldsMap, fragmentSpreadA)
-
- for _, fragmentSpreadB := range fragmentSpreads[idx+1:] {
- // (C) Then compare this fragment with all other fragments found in this
- // selection set to collect conflicts between fragments spread together.
- // This compares each item in the list of fragment names to every other
- // item in that same list (except for itself).
- m.collectConflictsBetweenFragments(&conflicts, false, fragmentSpreadA, fragmentSpreadB)
- }
- }
-
- return conflicts.Conflicts
-}
-
-func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFragment(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fieldsMap *sequentialFieldsMap, fragmentSpread *ast.FragmentSpread) {
- if m.comparedFragments[fragmentSpread.Name] {
- return
- }
- m.comparedFragments[fragmentSpread.Name] = true
-
- if fragmentSpread.Definition == nil {
- return
- }
-
- fieldsMapB, fragmentSpreads := getFieldsAndFragmentNames(fragmentSpread.Definition.SelectionSet)
-
- // Do not compare a fragment's fieldMap to itself.
- if reflect.DeepEqual(fieldsMap, fieldsMapB) {
- return
- }
-
- // (D) First collect any conflicts between the provided collection of fields
- // and the collection of fields represented by the given fragment.
- m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMap, fieldsMapB)
-
- // (E) Then collect any conflicts between the provided collection of fields
- // and any fragment names found in the given fragment.
- baseFragmentSpread := fragmentSpread
- for _, fragmentSpread := range fragmentSpreads {
- if fragmentSpread.Name == baseFragmentSpread.Name {
- continue
- }
- m.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsMap, fragmentSpread)
- }
-}
-
-func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
-
- var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread)
- check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
-
- if fragmentSpreadA.Name == fragmentSpreadB.Name {
- return
- }
-
- if m.comparedFragmentPairs.Has(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive) {
- return
- }
- m.comparedFragmentPairs.Add(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive)
-
- if fragmentSpreadA.Definition == nil {
- return
- }
- if fragmentSpreadB.Definition == nil {
- return
- }
-
- fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(fragmentSpreadA.Definition.SelectionSet)
- fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(fragmentSpreadB.Definition.SelectionSet)
-
- // (F) First, collect all conflicts between these two collections of fields
- // (not including any nested fragments).
- m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
-
- // (G) Then collect conflicts between the first fragment and any nested
- // fragments spread in the second fragment.
- for _, fragmentSpread := range fragmentSpreadsB {
- check(fragmentSpreadA, fragmentSpread)
- }
- // (G) Then collect conflicts between the second fragment and any nested
- // fragments spread in the first fragment.
- for _, fragmentSpread := range fragmentSpreadsA {
- check(fragmentSpread, fragmentSpreadB)
- }
- }
-
- check(fragmentSpreadA, fragmentSpreadB)
-}
-
-func (m *overlappingFieldsCanBeMergedManager) findConflictsBetweenSubSelectionSets(areMutuallyExclusive bool, selectionSetA ast.SelectionSet, selectionSetB ast.SelectionSet) *conflictMessageContainer {
- var conflicts conflictMessageContainer
-
- fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(selectionSetA)
- fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(selectionSetB)
-
- // (H) First, collect all conflicts between these two collections of fields.
- m.collectConflictsBetween(&conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
-
- // (I) Then collect conflicts between the first collection of fields and
- // those referenced by each fragment name associated with the second.
- for _, fragmentSpread := range fragmentSpreadsB {
- m.comparedFragments = make(map[string]bool)
- m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapA, fragmentSpread)
- }
-
- // (I) Then collect conflicts between the second collection of fields and
- // those referenced by each fragment name associated with the first.
- for _, fragmentSpread := range fragmentSpreadsA {
- m.comparedFragments = make(map[string]bool)
- m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapB, fragmentSpread)
- }
-
- // (J) Also collect conflicts between any fragment names by the first and
- // fragment names by the second. This compares each item in the first set of
- // names to each item in the second set of names.
- for _, fragmentSpreadA := range fragmentSpreadsA {
- for _, fragmentSpreadB := range fragmentSpreadsB {
- m.collectConflictsBetweenFragments(&conflicts, areMutuallyExclusive, fragmentSpreadA, fragmentSpreadB)
- }
- }
-
- if len(conflicts.Conflicts) == 0 {
- return nil
- }
-
- return &conflicts
-}
-
-func (m *overlappingFieldsCanBeMergedManager) collectConflictsWithin(conflicts *conflictMessageContainer, fieldsMap *sequentialFieldsMap) {
- for _, fields := range fieldsMap.Iterator() {
- for idx, fieldA := range fields {
- for _, fieldB := range fields[idx+1:] {
- conflict := m.findConflict(false, fieldA, fieldB)
- if conflict != nil {
- conflicts.Conflicts = append(conflicts.Conflicts, conflict)
- }
- }
- }
- }
-}
-
-func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetween(conflicts *conflictMessageContainer, parentFieldsAreMutuallyExclusive bool, fieldsMapA *sequentialFieldsMap, fieldsMapB *sequentialFieldsMap) {
- for _, fieldsEntryA := range fieldsMapA.KeyValueIterator() {
- fieldsB, ok := fieldsMapB.Get(fieldsEntryA.ResponseName)
- if !ok {
- continue
- }
- for _, fieldA := range fieldsEntryA.Fields {
- for _, fieldB := range fieldsB {
- conflict := m.findConflict(parentFieldsAreMutuallyExclusive, fieldA, fieldB)
- if conflict != nil {
- conflicts.Conflicts = append(conflicts.Conflicts, conflict)
- }
- }
- }
- }
-}
-
-func (m *overlappingFieldsCanBeMergedManager) findConflict(parentFieldsAreMutuallyExclusive bool, fieldA *ast.Field, fieldB *ast.Field) *ConflictMessage {
- if fieldA.Definition == nil || fieldA.ObjectDefinition == nil || fieldB.Definition == nil || fieldB.ObjectDefinition == nil {
- return nil
- }
-
- areMutuallyExclusive := parentFieldsAreMutuallyExclusive
- if !areMutuallyExclusive {
- tmp := fieldA.ObjectDefinition.Name != fieldB.ObjectDefinition.Name
- tmp = tmp && fieldA.ObjectDefinition.Kind == ast.Object
- tmp = tmp && fieldB.ObjectDefinition.Kind == ast.Object
- areMutuallyExclusive = tmp
- }
-
- fieldNameA := fieldA.Name
- if fieldA.Alias != "" {
- fieldNameA = fieldA.Alias
- }
-
- if !areMutuallyExclusive {
- // Two aliases must refer to the same field.
- if fieldA.Name != fieldB.Name {
- return &ConflictMessage{
- ResponseName: fieldNameA,
- Message: fmt.Sprintf(`%s and %s are different fields`, fieldA.Name, fieldB.Name),
- Position: fieldB.Position,
- }
- }
-
- // Two field calls must have the same arguments.
- if !sameArguments(fieldA.Arguments, fieldB.Arguments) {
- return &ConflictMessage{
- ResponseName: fieldNameA,
- Message: "they have differing arguments",
- Position: fieldB.Position,
- }
- }
- }
-
- if doTypesConflict(m.walker, fieldA.Definition.Type, fieldB.Definition.Type) {
- return &ConflictMessage{
- ResponseName: fieldNameA,
- Message: fmt.Sprintf(`they return conflicting types %s and %s`, fieldA.Definition.Type.String(), fieldB.Definition.Type.String()),
- Position: fieldB.Position,
- }
- }
-
- // Collect and compare sub-fields. Use the same "visited fragment names" list
- // for both collections so fields in a fragment reference are never
- // compared to themselves.
- conflicts := m.findConflictsBetweenSubSelectionSets(areMutuallyExclusive, fieldA.SelectionSet, fieldB.SelectionSet)
- if conflicts == nil {
- return nil
- }
- return &ConflictMessage{
- ResponseName: fieldNameA,
- SubMessage: conflicts.Conflicts,
- Position: fieldB.Position,
- }
-}
-
-func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool {
- if len(args1) != len(args2) {
- return false
- }
- for _, arg1 := range args1 {
- for _, arg2 := range args2 {
- if arg1.Name != arg2.Name {
- return false
- }
- if !sameValue(arg1.Value, arg2.Value) {
- return false
- }
- }
- }
- return true
-}
-
-func sameValue(value1 *ast.Value, value2 *ast.Value) bool {
- if value1.Kind != value2.Kind {
- return false
- }
- if value1.Raw != value2.Raw {
- return false
- }
- return true
-}
-
-func doTypesConflict(walker *Walker, type1 *ast.Type, type2 *ast.Type) bool {
- if type1.Elem != nil {
- if type2.Elem != nil {
- return doTypesConflict(walker, type1.Elem, type2.Elem)
- }
- return true
- }
- if type2.Elem != nil {
- return true
- }
- if type1.NonNull && !type2.NonNull {
- return true
- }
- if !type1.NonNull && type2.NonNull {
- return true
- }
-
- t1 := walker.Schema.Types[type1.NamedType]
- t2 := walker.Schema.Types[type2.NamedType]
- if (t1.Kind == ast.Scalar || t1.Kind == ast.Enum) && (t2.Kind == ast.Scalar || t2.Kind == ast.Enum) {
- return t1.Name != t2.Name
- }
-
- return false
-}
-
-func getFieldsAndFragmentNames(selectionSet ast.SelectionSet) (*sequentialFieldsMap, []*ast.FragmentSpread) {
- fieldsMap := sequentialFieldsMap{
- data: make(map[string][]*ast.Field),
- }
- var fragmentSpreads []*ast.FragmentSpread
-
- var walk func(selectionSet ast.SelectionSet)
- walk = func(selectionSet ast.SelectionSet) {
- for _, selection := range selectionSet {
- switch selection := selection.(type) {
- case *ast.Field:
- responseName := selection.Name
- if selection.Alias != "" {
- responseName = selection.Alias
- }
- fieldsMap.Push(responseName, selection)
-
- case *ast.InlineFragment:
- walk(selection.SelectionSet)
-
- case *ast.FragmentSpread:
- fragmentSpreads = append(fragmentSpreads, selection)
- }
- }
- }
- walk(selectionSet)
-
- return &fieldsMap, fragmentSpreads
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go b/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
deleted file mode 100644
index 04611834..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) {
-
- validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) {
- if parentDef == nil {
- return
- }
-
- var parentDefs []*ast.Definition
- switch parentDef.Kind {
- case ast.Object:
- parentDefs = []*ast.Definition{parentDef}
- case ast.Interface, ast.Union:
- parentDefs = walker.Schema.GetPossibleTypes(parentDef)
- default:
- return
- }
-
- fragmentDefType := walker.Schema.Types[fragmentName]
- if fragmentDefType == nil {
- return
- }
- if !fragmentDefType.IsCompositeType() {
- // checked by FragmentsOnCompositeTypes
- return
- }
- fragmentDefs := walker.Schema.GetPossibleTypes(fragmentDefType)
-
- for _, fragmentDef := range fragmentDefs {
- for _, parentDef := range parentDefs {
- if parentDef.Name == fragmentDef.Name {
- return
- }
- }
- }
-
- emitError()
- }
-
- observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
- validate(walker, inlineFragment.ObjectDefinition, inlineFragment.TypeCondition, func() {
- addError(
- Message(`Fragment cannot be spread here as objects of type "%s" can never be of type "%s".`, inlineFragment.ObjectDefinition.Name, inlineFragment.TypeCondition),
- At(inlineFragment.Position),
- )
- })
- })
-
- observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
- if fragmentSpread.Definition == nil {
- return
- }
- validate(walker, fragmentSpread.ObjectDefinition, fragmentSpread.Definition.TypeCondition, func() {
- addError(
- Message(`Fragment "%s" cannot be spread here as objects of type "%s" can never be of type "%s".`, fragmentSpread.Name, fragmentSpread.ObjectDefinition.Name, fragmentSpread.Definition.TypeCondition),
- At(fragmentSpread.Position),
- )
- })
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go b/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
deleted file mode 100644
index 55791a6b..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) {
-
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if field.Definition == nil {
- return
- }
-
- argDef:
- for _, argDef := range field.Definition.Arguments {
- if !argDef.Type.NonNull {
- continue
- }
- if argDef.DefaultValue != nil {
- continue
- }
- for _, arg := range field.Arguments {
- if arg.Name == argDef.Name {
- continue argDef
- }
- }
-
- addError(
- Message(`Field "%s" argument "%s" of type "%s" is required but not provided.`, field.Name, argDef.Name, argDef.Type.String()),
- At(field.Position),
- )
- }
- })
-
- observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
- if directive.Definition == nil {
- return
- }
-
- argDef:
- for _, argDef := range directive.Definition.Arguments {
- if !argDef.Type.NonNull {
- continue
- }
- if argDef.DefaultValue != nil {
- continue
- }
- for _, arg := range directive.Arguments {
- if arg.Name == argDef.Name {
- continue argDef
- }
- }
-
- addError(
- Message(`Directive "@%s" argument "%s" of type "%s" is required but not provided.`, directive.Definition.Name, argDef.Name, argDef.Type.String()),
- At(directive.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go b/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
deleted file mode 100644
index bb961f44..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("ScalarLeafs", func(observers *Events, addError AddErrFunc) {
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if field.Definition == nil {
- return
- }
-
- fieldType := walker.Schema.Types[field.Definition.Type.Name()]
- if fieldType == nil {
- return
- }
-
- if fieldType.IsLeafType() && len(field.SelectionSet) > 0 {
- addError(
- Message(`Field "%s" must not have a selection since type "%s" has no subfields.`, field.Name, fieldType.Name),
- At(field.Position),
- )
- }
-
- if !fieldType.IsLeafType() && len(field.SelectionSet) == 0 {
- addError(
- Message(`Field "%s" of type "%s" must have a selection of subfields.`, field.Name, field.Definition.Type.String()),
- Suggestf(`"%s { ... }"`, field.Name),
- At(field.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go b/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
deleted file mode 100644
index 53003c11..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package validator
-
-import (
- "strconv"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("SingleFieldSubscriptions", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- if operation.Operation != ast.Subscription {
- return
- }
-
- if len(operation.SelectionSet) > 1 {
- name := "Anonymous Subscription"
- if operation.Name != "" {
- name = `Subscription ` + strconv.Quote(operation.Name)
- }
-
- addError(
- Message(`%s must select only one top level field.`, name),
- At(operation.SelectionSet[1].GetPosition()),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
deleted file mode 100644
index 0ddcde72..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) {
- observers.OnField(func(walker *Walker, field *ast.Field) {
- checkUniqueArgs(field.Arguments, addError)
- })
-
- observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
- checkUniqueArgs(directive.Arguments, addError)
- })
- })
-}
-
-func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) {
- knownArgNames := map[string]bool{}
-
- for _, arg := range args {
- if knownArgNames[arg.Name] {
- addError(
- Message(`There can be only one argument named "%s".`, arg.Name),
- At(arg.Position),
- )
- }
-
- knownArgNames[arg.Name] = true
- }
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
deleted file mode 100644
index 077c4687..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) {
- observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) {
- seen := map[string]bool{}
-
- for _, dir := range directives {
- if seen[dir.Name] {
- addError(
- Message(`The directive "%s" can only be used once at this location.`, dir.Name),
- At(dir.Position),
- )
- }
- seen[dir.Name] = true
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
deleted file mode 100644
index 46a8b7c7..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) {
- seenFragments := map[string]bool{}
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- if seenFragments[fragment.Name] {
- addError(
- Message(`There can be only one fragment named "%s".`, fragment.Name),
- At(fragment.Position),
- )
- }
- seenFragments[fragment.Name] = true
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
deleted file mode 100644
index f254d588..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
+++ /dev/null
@@ -1,27 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) {
- observers.OnValue(func(walker *Walker, value *ast.Value) {
- if value.Kind != ast.ObjectValue {
- return
- }
-
- seen := map[string]bool{}
- for _, field := range value.Children {
- if seen[field.Name] {
- addError(
- Message(`There can be only one input field named "%s".`, field.Name),
- At(field.Position),
- )
- }
- seen[field.Name] = true
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
deleted file mode 100644
index c1ab56be..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) {
- seen := map[string]bool{}
-
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- if seen[operation.Name] {
- addError(
- Message(`There can be only one operation named "%s".`, operation.Name),
- At(operation.Position),
- )
- }
- seen[operation.Name] = true
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
deleted file mode 100644
index 70590a88..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- seen := map[string]bool{}
- for _, def := range operation.VariableDefinitions {
- if seen[def.Variable] {
- addError(
- Message(`There can be only one variable named "%s".`, def.Variable),
- At(def.Position),
- )
- }
- seen[def.Variable] = true
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
deleted file mode 100644
index d64cc666..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
+++ /dev/null
@@ -1,130 +0,0 @@
-package validator
-
-import (
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) {
- observers.OnValue(func(walker *Walker, value *ast.Value) {
- if value.Definition == nil || value.ExpectedType == nil {
- return
- }
-
- if value.Definition.Kind == ast.Scalar {
- // Skip validating custom scalars
- if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") {
- return
- }
- }
-
- var possibleEnums []string
- if value.Definition.Kind == ast.Enum {
- for _, val := range value.Definition.EnumValues {
- possibleEnums = append(possibleEnums, val.Name)
- }
- }
-
- rawVal, err := value.Value(nil)
- if err != nil {
- unexpectedTypeMessage(addError, value)
- }
-
- switch value.Kind {
- case ast.NullValue:
- if value.ExpectedType.NonNull {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.ListValue:
- if value.ExpectedType.Elem == nil {
- unexpectedTypeMessage(addError, value)
- return
- }
-
- case ast.IntValue:
- if !value.Definition.OneOf("Int", "Float", "ID") {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.FloatValue:
- if !value.Definition.OneOf("Float") {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.StringValue, ast.BlockValue:
- if value.Definition.Kind == ast.Enum {
- rawValStr := fmt.Sprint(rawVal)
- addError(
- Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
- SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
- At(value.Position),
- )
- } else if !value.Definition.OneOf("String", "ID") {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.EnumValue:
- if value.Definition.Kind != ast.Enum || value.Definition.EnumValues.ForName(value.Raw) == nil {
- rawValStr := fmt.Sprint(rawVal)
- addError(
- Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
- SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
- At(value.Position),
- )
- }
-
- case ast.BooleanValue:
- if !value.Definition.OneOf("Boolean") {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.ObjectValue:
-
- for _, field := range value.Definition.Fields {
- if field.Type.NonNull {
- fieldValue := value.Children.ForName(field.Name)
- if fieldValue == nil && field.DefaultValue == nil {
- addError(
- Message("Field %s.%s of required type %s was not provided.", value.Definition.Name, field.Name, field.Type.String()),
- At(value.Position),
- )
- continue
- }
- }
- }
-
- for _, fieldValue := range value.Children {
- if value.Definition.Fields.ForName(fieldValue.Name) == nil {
- var suggestions []string
- for _, fieldValue := range value.Definition.Fields {
- suggestions = append(suggestions, fieldValue.Name)
- }
-
- addError(
- Message(`Field "%s" is not defined by type %s.`, fieldValue.Name, value.Definition.Name),
- SuggestListUnquoted("Did you mean", fieldValue.Name, suggestions),
- At(fieldValue.Position),
- )
- }
- }
-
- case ast.Variable:
- return
-
- default:
- panic(fmt.Errorf("unhandled %T", value))
- }
- })
- })
-}
-
-func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) {
- addError(
- Message("Expected type %s, found %s.", v.ExpectedType.String(), v.String()),
- At(v.Position),
- )
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
deleted file mode 100644
index 9d58ae1c..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- for _, def := range operation.VariableDefinitions {
- if def.Definition == nil {
- continue
- }
- if !def.Definition.IsInputType() {
- addError(
- Message(
- `Variable "$%s" cannot be non-input type "%s".`,
- def.Variable,
- def.Type.String(),
- ),
- At(def.Position),
- )
- }
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
deleted file mode 100644
index e6d97c9f..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("VariablesInAllowedPosition", func(observers *Events, addError AddErrFunc) {
- observers.OnValue(func(walker *Walker, value *ast.Value) {
- if value.Kind != ast.Variable || value.ExpectedType == nil || value.VariableDefinition == nil || walker.CurrentOperation == nil {
- return
- }
-
- // todo: move me into walk
- // If there is a default value, non-nullable types can accept null
- if value.VariableDefinition.DefaultValue != nil && value.VariableDefinition.DefaultValue.Kind != ast.NullValue {
- if value.ExpectedType.NonNull {
- value.ExpectedType.NonNull = false
- }
- }
-
- if !value.VariableDefinition.Type.IsCompatible(value.ExpectedType) {
- addError(
- Message(
- `Variable "%s" of type "%s" used in position expecting type "%s".`,
- value,
- value.VariableDefinition.Type.String(),
- value.ExpectedType.String(),
- ),
- At(value.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/schema.go b/vendor/github.com/vektah/gqlparser/validator/schema.go
deleted file mode 100644
index 57d2022e..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/schema.go
+++ /dev/null
@@ -1,276 +0,0 @@
-//go:generate go run ./inliner/inliner.go
-
-package validator
-
-import (
- "strconv"
- "strings"
-
- . "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
- "github.com/vektah/gqlparser/parser"
-)
-
-func LoadSchema(inputs ...*Source) (*Schema, *gqlerror.Error) {
- ast, err := parser.ParseSchemas(inputs...)
- if err != nil {
- return nil, err
- }
- return ValidateSchemaDocument(ast)
-}
-
-func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, *gqlerror.Error) {
- schema := Schema{
- Types: map[string]*Definition{},
- Directives: map[string]*DirectiveDefinition{},
- PossibleTypes: map[string][]*Definition{},
- Implements: map[string][]*Definition{},
- }
-
- for i, def := range ast.Definitions {
- if schema.Types[def.Name] != nil {
- return nil, gqlerror.ErrorPosf(def.Position, "Cannot redeclare type %s.", def.Name)
- }
- schema.Types[def.Name] = ast.Definitions[i]
- }
-
- for _, ext := range ast.Extensions {
- def := schema.Types[ext.Name]
- if def == nil {
- return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because it does not exist.", ext.Name)
- }
-
- if def.Kind != ext.Kind {
- return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because the base type is a %s, not %s.", ext.Name, def.Kind, ext.Kind)
- }
-
- def.Directives = append(def.Directives, ext.Directives...)
- def.Interfaces = append(def.Interfaces, ext.Interfaces...)
- def.Fields = append(def.Fields, ext.Fields...)
- def.Types = append(def.Types, ext.Types...)
- def.EnumValues = append(def.EnumValues, ext.EnumValues...)
- }
-
- for _, def := range ast.Definitions {
- switch def.Kind {
- case Union:
- for _, t := range def.Types {
- schema.AddPossibleType(def.Name, schema.Types[t])
- schema.AddImplements(t, def)
- }
- case InputObject, Object:
- for _, intf := range def.Interfaces {
- schema.AddPossibleType(intf, def)
- schema.AddImplements(def.Name, schema.Types[intf])
- }
- schema.AddPossibleType(def.Name, def)
- }
- }
-
- for i, dir := range ast.Directives {
- if schema.Directives[dir.Name] != nil {
- return nil, gqlerror.ErrorPosf(dir.Position, "Cannot redeclare directive %s.", dir.Name)
- }
- schema.Directives[dir.Name] = ast.Directives[i]
- }
-
- if len(ast.Schema) > 1 {
- return nil, gqlerror.ErrorPosf(ast.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.")
- }
-
- if len(ast.Schema) == 1 {
- for _, entrypoint := range ast.Schema[0].OperationTypes {
- def := schema.Types[entrypoint.Type]
- if def == nil {
- return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
- }
- switch entrypoint.Operation {
- case Query:
- schema.Query = def
- case Mutation:
- schema.Mutation = def
- case Subscription:
- schema.Subscription = def
- }
- }
- }
-
- for _, ext := range ast.SchemaExtension {
- for _, entrypoint := range ext.OperationTypes {
- def := schema.Types[entrypoint.Type]
- if def == nil {
- return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
- }
- switch entrypoint.Operation {
- case Query:
- schema.Query = def
- case Mutation:
- schema.Mutation = def
- case Subscription:
- schema.Subscription = def
- }
- }
- }
-
- for _, typ := range schema.Types {
- err := validateDefinition(&schema, typ)
- if err != nil {
- return nil, err
- }
- }
-
- for _, dir := range schema.Directives {
- err := validateDirective(&schema, dir)
- if err != nil {
- return nil, err
- }
- }
-
- if schema.Query == nil && schema.Types["Query"] != nil {
- schema.Query = schema.Types["Query"]
- }
-
- if schema.Mutation == nil && schema.Types["Mutation"] != nil {
- schema.Mutation = schema.Types["Mutation"]
- }
-
- if schema.Subscription == nil && schema.Types["Subscription"] != nil {
- schema.Subscription = schema.Types["Subscription"]
- }
-
- if schema.Query != nil {
- schema.Query.Fields = append(
- schema.Query.Fields,
- &FieldDefinition{
- Name: "__schema",
- Type: NonNullNamedType("__Schema", nil),
- },
- &FieldDefinition{
- Name: "__type",
- Type: NonNullNamedType("__Type", nil),
- Arguments: ArgumentDefinitionList{
- {Name: "name", Type: NamedType("String", nil)},
- },
- },
- )
- }
-
- return &schema, nil
-}
-
-func validateDirective(schema *Schema, def *DirectiveDefinition) *gqlerror.Error {
- if err := validateName(def.Position, def.Name); err != nil {
- // currently, the GraphQL spec does not reserve any directive names
- return err
- }
-
- return validateArgs(schema, def.Arguments, def)
-}
-
-func validateDefinition(schema *Schema, def *Definition) *gqlerror.Error {
- for _, field := range def.Fields {
- if err := validateName(field.Position, field.Name); err != nil {
- // currently, the GraphQL spec does not reserve any field names
- return err
- }
- if err := validateTypeRef(schema, field.Type); err != nil {
- return err
- }
- if err := validateArgs(schema, field.Arguments, nil); err != nil {
- return err
- }
- if err := validateDirectives(schema, field.Directives, nil); err != nil {
- return err
- }
- }
-
- for _, intf := range def.Interfaces {
- intDef := schema.Types[intf]
- if intDef == nil {
- return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intf))
- }
- if intDef.Kind != Interface {
- return gqlerror.ErrorPosf(def.Position, "%s is a non interface type %s.", strconv.Quote(intf), intDef.Kind)
- }
- }
-
- switch def.Kind {
- case Object, Interface:
- if len(def.Fields) == 0 {
- return gqlerror.ErrorPosf(def.Position, "%s must define one or more fields.", def.Kind)
- }
- case Enum:
- if len(def.EnumValues) == 0 {
- return gqlerror.ErrorPosf(def.Position, "%s must define one or more unique enum values.", def.Kind)
- }
- case InputObject:
- if len(def.Fields) == 0 {
- return gqlerror.ErrorPosf(def.Position, "%s must define one or more input fields.", def.Kind)
- }
- }
-
- for idx, field1 := range def.Fields {
- for _, field2 := range def.Fields[idx+1:] {
- if field1.Name == field2.Name {
- return gqlerror.ErrorPosf(field2.Position, "Field %s.%s can only be defined once.", def.Name, field2.Name)
- }
- }
- }
-
- if !def.BuiltIn {
- // the GraphQL spec reserves many type names (those beginning with "__")
- err := validateName(def.Position, def.Name)
- if err != nil {
- return err
- }
- }
-
- return validateDirectives(schema, def.Directives, nil)
-}
-
-func validateTypeRef(schema *Schema, typ *Type) *gqlerror.Error {
- if schema.Types[typ.Name()] == nil {
- return gqlerror.ErrorPosf(typ.Position, "Undefined type %s.", typ.Name())
- }
- return nil
-}
-
-func validateArgs(schema *Schema, args ArgumentDefinitionList, currentDirective *DirectiveDefinition) *gqlerror.Error {
- for _, arg := range args {
- if err := validateName(arg.Position, arg.Name); err != nil {
- // currently, the GraphQL spec does not reserve any argument names
- return err
- }
- if err := validateTypeRef(schema, arg.Type); err != nil {
- return err
- }
- if err := validateDirectives(schema, arg.Directives, currentDirective); err != nil {
- return err
- }
- }
- return nil
-}
-
-func validateDirectives(schema *Schema, dirs DirectiveList, currentDirective *DirectiveDefinition) *gqlerror.Error {
- for _, dir := range dirs {
- if err := validateName(dir.Position, dir.Name); err != nil {
- // currently, the GraphQL spec does not reserve any directive names
- return err
- }
- if currentDirective != nil && dir.Name == currentDirective.Name {
- return gqlerror.ErrorPosf(dir.Position, "Directive %s cannot refer to itself.", currentDirective.Name)
- }
- if schema.Directives[dir.Name] == nil {
- return gqlerror.ErrorPosf(dir.Position, "Undefined directive %s.", dir.Name)
- }
- dir.Definition = schema.Directives[dir.Name]
- }
- return nil
-}
-
-func validateName(pos *Position, name string) *gqlerror.Error {
- if strings.HasPrefix(name, "__") {
- return gqlerror.ErrorPosf(pos, `Name "%s" must not begin with "__", which is reserved by GraphQL introspection.`, name)
- }
- return nil
-}
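
For orientation, here is a minimal sketch of how this schema loader is typically driven. It assumes the package's bundled Prelude source (prelude.go) supplies the built-in scalars and directives; the schema text and printed field are illustrative only.

	package main

	import (
		"fmt"

		"github.com/vektah/gqlparser/ast"
		"github.com/vektah/gqlparser/validator"
	)

	func main() {
		// Prelude provides Int, Float, String, Boolean, ID and the standard
		// directives; the second source is the user-defined schema.
		schema, err := validator.LoadSchema(validator.Prelude, &ast.Source{
			Name:  "schema.graphql",
			Input: `type Query { hello: String }`,
		})
		if err != nil {
			panic(err)
		}
		fmt.Println(schema.Query.Name) // Query
	}
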
diff --git a/vendor/github.com/vektah/gqlparser/validator/schema_test.yml b/vendor/github.com/vektah/gqlparser/validator/schema_test.yml
deleted file mode 100644
index abc8dd7e..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/schema_test.yml
+++ /dev/null
@@ -1,323 +0,0 @@
-types:
- - name: cannot be redeclared
- input: |
- type A {
- name: String
- }
- type A {
- name: String
- }
- error:
- message: "Cannot redeclare type A."
- locations: [{line: 4, column: 6}]
- - name: cannot be duplicated field at same definition 1
- input: |
- type A {
- name: String
- name: String
- }
- error:
- message: "Field A.name can only be defined once."
- locations: [{line: 3, column: 3}]
- - name: cannot be duplicated field at same definition 2
- input: |
- type A {
- name: String
- }
- extend type A {
- name: String
- }
- error:
- message: "Field A.name can only be defined once."
- locations: [{line: 5, column: 3}]
- - name: cannot be duplicated field at same definition 3
- input: |
- type A {
- name: String
- }
- extend type A {
- age: Int
- age: Int
- }
- error:
- message: "Field A.age can only be defined once."
- locations: [{line: 6, column: 3}]
-
-object types:
- - name: must define one or more fields
- input: |
- directive @D on OBJECT
-
- # This pattern is rejected by the parser
- # type InvalidObject1 {}
-
- type InvalidObject2 @D
-
- type ValidObject {
- id: ID
- }
- extend type ValidObject @D
- extend type ValidObject {
- b: Int
- }
- error:
- message: 'OBJECT must define one or more fields.'
- locations: [{line: 6, column: 6}]
- - name: check reserved names on type name
- input: |
- type __FooBar {
- id: ID
- }
- error:
- message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 6}]
- - name: check reserved names on type field
- input: |
- type FooBar {
- __id: ID
- }
- error:
- message: 'Name "__id" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 2, column: 3}]
-
- - name: check reserved names on type field argument
- input: |
- type FooBar {
- foo(__bar: ID): ID
- }
- error:
- message: 'Name "__bar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 2, column: 7}]
-
-interfaces:
- - name: must exist
- input: |
- type Thing implements Object {
- id: ID!
- }
-
- type Query {
- Things: [Thing!]!
- }
- error:
- message: 'Undefined type "Object".'
- locations: [{line: 1, column: 6}]
-
- - name: must be an interface
- input: |
- type Thing implements Object {
- id: ID!
- }
-
- type Query {
- Things: [Thing!]!
- }
-
- type Object {
- name: String
- }
- error:
- message: '"Object" is a non interface type OBJECT.'
- locations: [{line: 1, column: 6}]
-
- - name: must define one or more fields
- input: |
- directive @D on INTERFACE
-
- # This pattern is rejected by the parser
- # interface InvalidInterface1 {}
-
- interface InvalidInterface2 @D
-
- interface ValidInterface {
- id: ID
- }
- extend interface ValidInterface @D
- extend interface ValidInterface {
- b: Int
- }
- error:
- message: 'INTERFACE must define one or more fields.'
- locations: [{line: 6, column: 11}]
- - name: check reserved names on type name
- input: |
- interface __FooBar {
- id: ID
- }
- error:
- message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 11}]
-
-inputs:
- - name: must define one or more input fields
- input: |
- directive @D on INPUT_OBJECT
-
- # This pattern is rejected by the parser
- # input InvalidInput1 {}
-
- input InvalidInput2 @D
-
- input ValidInput {
- id: ID
- }
- extend input ValidInput @D
- extend input ValidInput {
- b: Int
- }
- error:
- message: 'INPUT_OBJECT must define one or more input fields.'
- locations: [{line: 6, column: 7}]
- - name: check reserved names on type name
- input: |
- input __FooBar {
- id: ID
- }
- error:
- message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 7}]
-
-enums:
- - name: must define one or more unique enum values
- input: |
- directive @D on ENUM
-
- # This pattern is rejected by the parser
- # enum InvalidEnum1 {}
-
- enum InvalidEnum2 @D
-
- enum ValidEnum {
- FOO
- }
- extend enum ValidEnum @D
- extend enum ValidEnum {
- BAR
- }
- error:
- message: 'ENUM must define one or more unique enum values.'
- locations: [{line: 6, column: 6}]
- - name: check reserved names on type name
- input: |
- enum __FooBar {
- A
- B
- }
- error:
- message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 6}]
-
-type extensions:
- - name: cannot extend non-existent types
- input: |
- extend type A {
- name: String
- }
- error:
- message: "Cannot extend type A because it does not exist."
- locations: [{line: 1, column: 13}]
-
- - name: cannot extend existing types with the wrong kind
- input: |
- scalar A
- extend type A {
- name: String
- }
- error:
- message: "Cannot extend type A because the base type is a SCALAR, not OBJECT."
- locations: [{line: 2, column: 13}]
-
-directives:
- - name: cannot redeclare directives
- input: |
- directive @A on FIELD_DEFINITION
- directive @A on FIELD_DEFINITION
- error:
- message: "Cannot redeclare directive A."
- locations: [{line: 2, column: 12}]
-
- - name: must be declared
- input: |
- type User {
- name: String @foo
- }
- error:
- message: "Undefined directive foo."
- locations: [{line: 2, column: 17}]
-
- - name: cannot be self-referential
- input: |
- directive @A(foo: Int! @A) on FIELD_DEFINITION
- error:
- message: "Directive A cannot refer to itself."
- locations: [{line: 1, column: 25}]
- - name: check reserved names on type name
- input: |
- directive @__A on FIELD_DEFINITION
- error:
- message: 'Name "__A" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 12}]
-
-entry points:
- - name: multiple schema entry points
- input: |
- schema {
- query: Query
- }
- schema {
- query: Query
- }
- scalar Query
- error:
- message: "Cannot have multiple schema entry points, consider schema extensions instead."
- locations: [{line: 4, column: 8}]
-
- - name: Undefined schema entrypoint
- input: |
- schema {
- query: Query
- }
- error:
- message: "Schema root query refers to a type Query that does not exist."
- locations: [{line: 2, column: 3}]
-
-entry point extensions:
- - name: Undefined schema entrypoint
- input: |
- schema {
- query: Query
- }
- scalar Query
- extend schema {
- mutation: Mutation
- }
- error:
- message: "Schema root mutation refers to a type Mutation that does not exist."
- locations: [{line: 6, column: 3}]
-
-type references:
- - name: Field types
- input: |
- type User {
- posts: Post
- }
- error:
- message: "Undefined type Post."
- locations: [{line: 2, column: 10}]
-
- - name: Arg types
- input: |
- type User {
- posts(foo: FooBar): String
- }
- error:
- message: "Undefined type FooBar."
- locations: [{line: 2, column: 14}]
-
- - name: Directive arg types
- input: |
- directive @Foo(foo: FooBar) on FIELD_DEFINITION
-
- error:
- message: "Undefined type FooBar."
- locations: [{line: 1, column: 21}]
diff --git a/vendor/github.com/vektah/gqlparser/validator/suggestionList.go b/vendor/github.com/vektah/gqlparser/validator/suggestionList.go
deleted file mode 100644
index f58d0fc2..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/suggestionList.go
+++ /dev/null
@@ -1,69 +0,0 @@
-package validator
-
-import (
- "sort"
- "strings"
-
- "github.com/agnivade/levenshtein"
-)
-
-// Given an invalid input string and a list of valid options, returns a filtered
-// list of valid options sorted based on their similarity with the input.
-func SuggestionList(input string, options []string) []string {
- var results []string
- optionsByDistance := map[string]int{}
-
- for _, option := range options {
- distance := lexicalDistance(input, option)
- threshold := calcThreshold(input, option)
- if distance <= threshold {
- results = append(results, option)
- optionsByDistance[option] = distance
- }
- }
-
- sort.Slice(results, func(i, j int) bool {
- return optionsByDistance[results[i]] < optionsByDistance[results[j]]
- })
- return results
-}
-
-func calcThreshold(a, b string) (threshold int) {
- if len(a) >= len(b) {
- threshold = len(a) / 2
- } else {
- threshold = len(b) / 2
- }
- if threshold < 1 {
- threshold = 1
- }
- return
-}
-
-// Computes the lexical distance between strings A and B.
-//
-// The "distance" between two strings is given by counting the minimum number
-// of edits needed to transform string A into string B. An edit can be an
-// insertion, deletion, or substitution of a single character, or a swap of two
-// adjacent characters.
-//
-// Includes a custom alteration from Damerau-Levenshtein to treat case changes
-// as a single edit which helps identify mis-cased values with an edit distance
-// of 1.
-//
-// This distance can be useful for detecting typos in input or for sorting candidate suggestions.
-func lexicalDistance(a, b string) int {
- if a == b {
- return 0
- }
-
- a = strings.ToLower(a)
- b = strings.ToLower(b)
-
- // Any case change counts as a single edit
- if a == b {
- return 1
- }
-
- return levenshtein.ComputeDistance(a, b)
-}
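
A rough worked example of the thresholding above (a sketch, not a test shipped with the package): for the input "cat", a case-only change and a single insertion both fall within their thresholds, while an unrelated word does not.

	package main

	import (
		"fmt"

		"github.com/vektah/gqlparser/validator"
	)

	func main() {
		options := []string{"Cat", "cart", "dog"}
		// "Cat" is distance 1 (case change only) with threshold 1, "cart" is
		// distance 1 (one insertion) with threshold 2, and "dog" is distance 3
		// with threshold 1, so only the first two survive the filter.
		fmt.Println(validator.SuggestionList("cat", options)) // e.g. [Cat cart]
	}
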
diff --git a/vendor/github.com/vektah/gqlparser/validator/validator.go b/vendor/github.com/vektah/gqlparser/validator/validator.go
deleted file mode 100644
index bbacec6f..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/validator.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package validator
-
-import (
- . "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-type AddErrFunc func(options ...ErrorOption)
-
-type ruleFunc func(observers *Events, addError AddErrFunc)
-
-type rule struct {
- name string
- rule ruleFunc
-}
-
-var rules []rule
-
-// AddRule adds a rule to the rule set.
-// f is called once each time `Validate` is executed.
-func AddRule(name string, f ruleFunc) {
- rules = append(rules, rule{name: name, rule: f})
-}
-
-func Validate(schema *Schema, doc *QueryDocument) gqlerror.List {
- var errs gqlerror.List
-
- observers := &Events{}
- for i := range rules {
- rule := rules[i]
- rule.rule(observers, func(options ...ErrorOption) {
- err := &gqlerror.Error{
- Rule: rule.name,
- }
- for _, o := range options {
- o(err)
- }
- errs = append(errs, err)
- })
- }
-
- Walk(schema, doc, observers)
- return errs
-}
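
A minimal end-to-end sketch of the registry above, assuming the rules package is blank-imported so its init functions register the default rule set; the schema and query are illustrative only.

	package main

	import (
		"fmt"

		"github.com/vektah/gqlparser/ast"
		"github.com/vektah/gqlparser/parser"
		"github.com/vektah/gqlparser/validator"
		_ "github.com/vektah/gqlparser/validator/rules" // registers the default rules
	)

	func main() {
		schema, err := validator.LoadSchema(validator.Prelude, &ast.Source{
			Input: `type Query { hello: String }`,
		})
		if err != nil {
			panic(err)
		}

		// "goodbye" is not a field of Query, so FieldsOnCorrectType should report it.
		doc, perr := parser.ParseQuery(&ast.Source{Input: `{ goodbye }`})
		if perr != nil {
			panic(perr)
		}

		for _, e := range validator.Validate(schema, doc) {
			fmt.Println(e.Rule+":", e.Message)
		}
	}
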
diff --git a/vendor/github.com/vektah/gqlparser/validator/vars.go b/vendor/github.com/vektah/gqlparser/validator/vars.go
deleted file mode 100644
index aaf3a0d1..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/vars.go
+++ /dev/null
@@ -1,199 +0,0 @@
-package validator
-
-import (
- "reflect"
-
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-var UnexpectedType = fmt.Errorf("Unexpected Type")
-
-// VariableValues coerces and validates variable values
-func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, *gqlerror.Error) {
- coercedVars := map[string]interface{}{}
-
- validator := varValidator{
- path: []interface{}{"variable"},
- schema: schema,
- }
-
- for _, v := range op.VariableDefinitions {
- validator.path = append(validator.path, v.Variable)
-
- if !v.Definition.IsInputType() {
- return nil, gqlerror.ErrorPathf(validator.path, "must be an input type")
- }
-
- val, hasValue := variables[v.Variable]
- if !hasValue {
- if v.DefaultValue != nil {
- var err error
- val, err = v.DefaultValue.Value(nil)
- if err != nil {
- return nil, gqlerror.WrapPath(validator.path, err)
- }
- hasValue = true
- } else if v.Type.NonNull {
- return nil, gqlerror.ErrorPathf(validator.path, "must be defined")
- }
- }
-
- if hasValue {
- if val == nil {
- if v.Type.NonNull {
- return nil, gqlerror.ErrorPathf(validator.path, "cannot be null")
- }
- coercedVars[v.Variable] = nil
- } else {
- rv := reflect.ValueOf(val)
- if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface {
- rv = rv.Elem()
- }
-
- if err := validator.validateVarType(v.Type, rv); err != nil {
- return nil, err
- }
-
- coercedVars[v.Variable] = val
- }
- }
-
- validator.path = validator.path[0 : len(validator.path)-1]
- }
-
- return coercedVars, nil
-}
-
-type varValidator struct {
- path []interface{}
- schema *ast.Schema
-}
-
-func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) *gqlerror.Error {
- currentPath := v.path
- resetPath := func() {
- v.path = currentPath
- }
- defer resetPath()
-
- if typ.Elem != nil {
- if val.Kind() != reflect.Slice {
- return gqlerror.ErrorPathf(v.path, "must be an array")
- }
-
- for i := 0; i < val.Len(); i++ {
- resetPath()
- v.path = append(v.path, i)
- field := val.Index(i)
-
- if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
- if typ.Elem.NonNull && field.IsNil() {
- return gqlerror.ErrorPathf(v.path, "cannot be null")
- }
- field = field.Elem()
- }
-
- if err := v.validateVarType(typ.Elem, field); err != nil {
- return err
- }
- }
-
- return nil
- }
-
- def := v.schema.Types[typ.NamedType]
- if def == nil {
- panic(fmt.Errorf("missing def for %s", typ.NamedType))
- }
-
- switch def.Kind {
- case ast.Enum:
- kind := val.Type().Kind()
- if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
- return nil
- }
- return gqlerror.ErrorPathf(v.path, "enums must be ints or strings")
- case ast.Scalar:
- kind := val.Type().Kind()
- switch typ.NamedType {
- case "Int":
- if kind == reflect.String || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 {
- return nil
- }
- case "Float":
- if kind == reflect.String || kind == reflect.Float32 || kind == reflect.Float64 || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 {
- return nil
- }
- case "String":
- if kind == reflect.String {
- return nil
- }
-
- case "Boolean":
- if kind == reflect.Bool {
- return nil
- }
-
- case "ID":
- if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
- return nil
- }
- default:
- // assume custom scalars are ok
- return nil
- }
- return gqlerror.ErrorPathf(v.path, "cannot use %s as %s", kind.String(), typ.NamedType)
- case ast.InputObject:
- if val.Kind() != reflect.Map {
- return gqlerror.ErrorPathf(v.path, "must be a %s", def.Name)
- }
-
- // check for unknown fields
- for _, name := range val.MapKeys() {
- val.MapIndex(name)
- fieldDef := def.Fields.ForName(name.String())
- resetPath()
- v.path = append(v.path, name.String())
-
- if fieldDef == nil {
- return gqlerror.ErrorPathf(v.path, "unknown field")
- }
- }
-
- for _, fieldDef := range def.Fields {
- resetPath()
- v.path = append(v.path, fieldDef.Name)
-
- field := val.MapIndex(reflect.ValueOf(fieldDef.Name))
- if !field.IsValid() {
- if fieldDef.Type.NonNull {
- return gqlerror.ErrorPathf(v.path, "must be defined")
- }
- continue
- }
-
- if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
- if fieldDef.Type.NonNull && field.IsNil() {
- return gqlerror.ErrorPathf(v.path, "cannot be null")
- }
- // allow a null object field and skip it
- if !fieldDef.Type.NonNull && field.IsNil() {
- continue
- }
- field = field.Elem()
- }
-
- err := v.validateVarType(fieldDef.Type, field)
- if err != nil {
- return err
- }
- }
- default:
- panic(fmt.Errorf("unsupported type %s", def.Kind))
- }
-
- return nil
-}
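
A short sketch of the coercion path above. The variable definitions only carry their schema Definition after the document has been walked (for example via Validate), so VariableValues is normally called afterwards; the query, variable name, and value here are illustrative assumptions.

	package main

	import (
		"fmt"

		"github.com/vektah/gqlparser/ast"
		"github.com/vektah/gqlparser/parser"
		"github.com/vektah/gqlparser/validator"
	)

	func main() {
		schema, err := validator.LoadSchema(validator.Prelude, &ast.Source{
			Input: `type Query { hello(name: String!): String }`,
		})
		if err != nil {
			panic(err)
		}

		doc, perr := parser.ParseQuery(&ast.Source{
			Input: `query Hello($name: String!) { hello(name: $name) }`,
		})
		if perr != nil {
			panic(perr)
		}

		// Walking the document (done inside Validate) fills in each variable
		// definition's Definition, which VariableValues relies on.
		if errs := validator.Validate(schema, doc); len(errs) > 0 {
			panic(errs)
		}

		vars, verr := validator.VariableValues(schema, doc.Operations[0], map[string]interface{}{
			"name": "world",
		})
		if verr != nil {
			panic(verr)
		}
		fmt.Println(vars["name"]) // world
	}
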
diff --git a/vendor/github.com/vektah/gqlparser/validator/walk.go b/vendor/github.com/vektah/gqlparser/validator/walk.go
deleted file mode 100644
index 751ba1f1..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/walk.go
+++ /dev/null
@@ -1,286 +0,0 @@
-package validator
-
-import (
- "context"
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
-)
-
-type Events struct {
- operationVisitor []func(walker *Walker, operation *ast.OperationDefinition)
- field []func(walker *Walker, field *ast.Field)
- fragment []func(walker *Walker, fragment *ast.FragmentDefinition)
- inlineFragment []func(walker *Walker, inlineFragment *ast.InlineFragment)
- fragmentSpread []func(walker *Walker, fragmentSpread *ast.FragmentSpread)
- directive []func(walker *Walker, directive *ast.Directive)
- directiveList []func(walker *Walker, directives []*ast.Directive)
- value []func(walker *Walker, value *ast.Value)
-}
-
-func (o *Events) OnOperation(f func(walker *Walker, operation *ast.OperationDefinition)) {
- o.operationVisitor = append(o.operationVisitor, f)
-}
-func (o *Events) OnField(f func(walker *Walker, field *ast.Field)) {
- o.field = append(o.field, f)
-}
-func (o *Events) OnFragment(f func(walker *Walker, fragment *ast.FragmentDefinition)) {
- o.fragment = append(o.fragment, f)
-}
-func (o *Events) OnInlineFragment(f func(walker *Walker, inlineFragment *ast.InlineFragment)) {
- o.inlineFragment = append(o.inlineFragment, f)
-}
-func (o *Events) OnFragmentSpread(f func(walker *Walker, fragmentSpread *ast.FragmentSpread)) {
- o.fragmentSpread = append(o.fragmentSpread, f)
-}
-func (o *Events) OnDirective(f func(walker *Walker, directive *ast.Directive)) {
- o.directive = append(o.directive, f)
-}
-func (o *Events) OnDirectiveList(f func(walker *Walker, directives []*ast.Directive)) {
- o.directiveList = append(o.directiveList, f)
-}
-func (o *Events) OnValue(f func(walker *Walker, value *ast.Value)) {
- o.value = append(o.value, f)
-}
-
-func Walk(schema *ast.Schema, document *ast.QueryDocument, observers *Events) {
- w := Walker{
- Observers: observers,
- Schema: schema,
- Document: document,
- }
-
- w.walk()
-}
-
-type Walker struct {
- Context context.Context
- Observers *Events
- Schema *ast.Schema
- Document *ast.QueryDocument
-
- validatedFragmentSpreads map[string]bool
- CurrentOperation *ast.OperationDefinition
-}
-
-func (w *Walker) walk() {
- for _, child := range w.Document.Operations {
- w.validatedFragmentSpreads = make(map[string]bool)
- w.walkOperation(child)
- }
- for _, child := range w.Document.Fragments {
- w.validatedFragmentSpreads = make(map[string]bool)
- w.walkFragment(child)
- }
-}
-
-func (w *Walker) walkOperation(operation *ast.OperationDefinition) {
- w.CurrentOperation = operation
- for _, varDef := range operation.VariableDefinitions {
- varDef.Definition = w.Schema.Types[varDef.Type.Name()]
-
- if varDef.DefaultValue != nil {
- varDef.DefaultValue.ExpectedType = varDef.Type
- varDef.DefaultValue.Definition = w.Schema.Types[varDef.Type.Name()]
- }
- }
-
- var def *ast.Definition
- var loc ast.DirectiveLocation
- switch operation.Operation {
- case ast.Query, "":
- def = w.Schema.Query
- loc = ast.LocationQuery
- case ast.Mutation:
- def = w.Schema.Mutation
- loc = ast.LocationMutation
- case ast.Subscription:
- def = w.Schema.Subscription
- loc = ast.LocationSubscription
- }
-
- w.walkDirectives(def, operation.Directives, loc)
-
- for _, varDef := range operation.VariableDefinitions {
- if varDef.DefaultValue != nil {
- w.walkValue(varDef.DefaultValue)
- }
- }
-
- w.walkSelectionSet(def, operation.SelectionSet)
-
- for _, v := range w.Observers.operationVisitor {
- v(w, operation)
- }
- w.CurrentOperation = nil
-}
-
-func (w *Walker) walkFragment(it *ast.FragmentDefinition) {
- def := w.Schema.Types[it.TypeCondition]
-
- it.Definition = def
-
- w.walkDirectives(def, it.Directives, ast.LocationFragmentDefinition)
- w.walkSelectionSet(def, it.SelectionSet)
-
- for _, v := range w.Observers.fragment {
- v(w, it)
- }
-}
-
-func (w *Walker) walkDirectives(parentDef *ast.Definition, directives []*ast.Directive, location ast.DirectiveLocation) {
- for _, dir := range directives {
- def := w.Schema.Directives[dir.Name]
- dir.Definition = def
- dir.ParentDefinition = parentDef
- dir.Location = location
-
- for _, arg := range dir.Arguments {
- var argDef *ast.ArgumentDefinition
- if def != nil {
- argDef = def.Arguments.ForName(arg.Name)
- }
-
- w.walkArgument(argDef, arg)
- }
-
- for _, v := range w.Observers.directive {
- v(w, dir)
- }
- }
-
- for _, v := range w.Observers.directiveList {
- v(w, directives)
- }
-}
-
-func (w *Walker) walkValue(value *ast.Value) {
- if value.Kind == ast.Variable && w.CurrentOperation != nil {
- value.VariableDefinition = w.CurrentOperation.VariableDefinitions.ForName(value.Raw)
- if value.VariableDefinition != nil {
- value.VariableDefinition.Used = true
- }
- }
-
- if value.Kind == ast.ObjectValue {
- for _, child := range value.Children {
- if value.Definition != nil {
- fieldDef := value.Definition.Fields.ForName(child.Name)
- if fieldDef != nil {
- child.Value.ExpectedType = fieldDef.Type
- child.Value.Definition = w.Schema.Types[fieldDef.Type.Name()]
- }
- }
- w.walkValue(child.Value)
- }
- }
-
- if value.Kind == ast.ListValue {
- for _, child := range value.Children {
- if value.ExpectedType != nil && value.ExpectedType.Elem != nil {
- child.Value.ExpectedType = value.ExpectedType.Elem
- child.Value.Definition = value.Definition
- }
-
- w.walkValue(child.Value)
- }
- }
-
- for _, v := range w.Observers.value {
- v(w, value)
- }
-}
-
-func (w *Walker) walkArgument(argDef *ast.ArgumentDefinition, arg *ast.Argument) {
- if argDef != nil {
- arg.Value.ExpectedType = argDef.Type
- arg.Value.Definition = w.Schema.Types[argDef.Type.Name()]
- }
-
- w.walkValue(arg.Value)
-}
-
-func (w *Walker) walkSelectionSet(parentDef *ast.Definition, it ast.SelectionSet) {
- for _, child := range it {
- w.walkSelection(parentDef, child)
- }
-}
-
-func (w *Walker) walkSelection(parentDef *ast.Definition, it ast.Selection) {
- switch it := it.(type) {
- case *ast.Field:
- var def *ast.FieldDefinition
- if it.Name == "__typename" {
- def = &ast.FieldDefinition{
- Name: "__typename",
- Type: ast.NamedType("String", nil),
- }
- } else if parentDef != nil {
- def = parentDef.Fields.ForName(it.Name)
- }
-
- it.Definition = def
- it.ObjectDefinition = parentDef
-
- var nextParentDef *ast.Definition
- if def != nil {
- nextParentDef = w.Schema.Types[def.Type.Name()]
- }
-
- for _, arg := range it.Arguments {
- var argDef *ast.ArgumentDefinition
- if def != nil {
- argDef = def.Arguments.ForName(arg.Name)
- }
-
- w.walkArgument(argDef, arg)
- }
-
- w.walkDirectives(nextParentDef, it.Directives, ast.LocationField)
- w.walkSelectionSet(nextParentDef, it.SelectionSet)
-
- for _, v := range w.Observers.field {
- v(w, it)
- }
-
- case *ast.InlineFragment:
- it.ObjectDefinition = parentDef
-
- nextParentDef := parentDef
- if it.TypeCondition != "" {
- nextParentDef = w.Schema.Types[it.TypeCondition]
- }
-
- w.walkDirectives(nextParentDef, it.Directives, ast.LocationInlineFragment)
- w.walkSelectionSet(nextParentDef, it.SelectionSet)
-
- for _, v := range w.Observers.inlineFragment {
- v(w, it)
- }
-
- case *ast.FragmentSpread:
- def := w.Document.Fragments.ForName(it.Name)
- it.Definition = def
- it.ObjectDefinition = parentDef
-
- var nextParentDef *ast.Definition
- if def != nil {
- nextParentDef = w.Schema.Types[def.TypeCondition]
- }
-
- w.walkDirectives(nextParentDef, it.Directives, ast.LocationFragmentSpread)
-
- if def != nil && !w.validatedFragmentSpreads[def.Name] {
- // prevent infinite recursion
- w.validatedFragmentSpreads[def.Name] = true
- w.walkSelectionSet(nextParentDef, def.SelectionSet)
- }
-
- for _, v := range w.Observers.fragmentSpread {
- v(w, it)
- }
-
- default:
- panic(fmt.Errorf("unsupported %T", it))
- }
-}
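
These Events hooks are the extension point the rule files above build on. As a sketch, a hypothetical extra rule (the rule name and check are illustrative, not part of the package) would register itself the same way the built-in rules do:

	package rules

	import (
		"github.com/vektah/gqlparser/ast"
		. "github.com/vektah/gqlparser/validator"
	)

	func init() {
		// Hypothetical rule: complain when a single field selection carries an
		// unusually large number of directives.
		AddRule("NotTooManyDirectives", func(observers *Events, addError AddErrFunc) {
			observers.OnField(func(walker *Walker, field *ast.Field) {
				if len(field.Directives) > 3 {
					addError(
						Message(`Field "%s" has more than 3 directives.`, field.Name),
						At(field.Position),
					)
				}
			})
		})
	}
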