aboutsummaryrefslogtreecommitdiffstats
path: root/vendor/github.com/vektah/gqlparser/validator/rules
diff options
context:
space:
mode:
authorMichael Muré <batolettre@gmail.com>2018-09-14 12:40:31 +0200
committerMichael Muré <batolettre@gmail.com>2018-09-14 12:41:59 +0200
commitb478cd1bcb4756b20f7f4b15fcf81f23e1a60a02 (patch)
tree8ce232dcab3dd00708f8ba66c334472457e5980d /vendor/github.com/vektah/gqlparser/validator/rules
parenta3fc9abb921f5ce7084d6ab7473442d0b72b1d78 (diff)
downloadgit-bug-b478cd1bcb4756b20f7f4b15fcf81f23e1a60a02.tar.gz
graphql: update gqlgen to 0.5.1
fix #6
Diffstat (limited to 'vendor/github.com/vektah/gqlparser/validator/rules')
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go86
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go39
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go57
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go31
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go19
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go61
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go19
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go93
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go28
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go30
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go30
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go553
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go68
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go63
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go36
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go30
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go33
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go24
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go22
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go27
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go22
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go23
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go130
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go28
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go36
25 files changed, 1588 insertions, 0 deletions
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
new file mode 100644
index 00000000..69148d52
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
@@ -0,0 +1,86 @@
+package validator
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("FieldsOnCorrectType", func(observers *Events, addError AddErrFunc) {
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if field.ObjectDefinition == nil || field.Definition != nil {
+ return
+ }
+
+ message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name)
+
+ if suggestedTypeNames := getSuggestedTypeNames(walker, field.ObjectDefinition, field.Name); suggestedTypeNames != nil {
+ message += " Did you mean to use an inline fragment on " + QuotedOrList(suggestedTypeNames...) + "?"
+ } else if suggestedFieldNames := getSuggestedFieldNames(field.ObjectDefinition, field.Name); suggestedFieldNames != nil {
+ message += " Did you mean " + QuotedOrList(suggestedFieldNames...) + "?"
+ }
+
+ addError(
+ Message(message),
+ At(field.Position),
+ )
+ })
+ })
+}
+
+// Go through all of the implementations of type, as well as the interfaces
+// that they implement. If any of those types include the provided field,
+// suggest them, sorted by how often the type is referenced, starting
+// with Interfaces.
+func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) []string {
+ if !parent.IsAbstractType() {
+ return nil
+ }
+
+ var suggestedObjectTypes []string
+ var suggestedInterfaceTypes []string
+ interfaceUsageCount := map[string]int{}
+
+ for _, possibleType := range walker.Schema.GetPossibleTypes(parent) {
+ field := possibleType.Fields.ForName(name)
+ if field == nil {
+ continue
+ }
+
+ suggestedObjectTypes = append(suggestedObjectTypes, possibleType.Name)
+
+ for _, possibleInterface := range possibleType.Interfaces {
+ interfaceField := walker.Schema.Types[possibleInterface]
+ if interfaceField != nil && interfaceField.Fields.ForName(name) != nil {
+ if interfaceUsageCount[possibleInterface] == 0 {
+ suggestedInterfaceTypes = append(suggestedInterfaceTypes, possibleInterface)
+ }
+ interfaceUsageCount[possibleInterface]++
+ }
+ }
+ }
+
+ sort.SliceStable(suggestedInterfaceTypes, func(i, j int) bool {
+ return interfaceUsageCount[suggestedInterfaceTypes[i]] > interfaceUsageCount[suggestedInterfaceTypes[j]]
+ })
+
+ return append(suggestedInterfaceTypes, suggestedObjectTypes...)
+}
+
+// For the field name provided, determine if there are any similar field names
+// that may be the result of a typo.
+func getSuggestedFieldNames(parent *ast.Definition, name string) []string {
+ if parent.Kind != ast.Object && parent.Kind != ast.Interface {
+ return nil
+ }
+
+ var possibleFieldNames []string
+ for _, field := range parent.Fields {
+ possibleFieldNames = append(possibleFieldNames, field.Name)
+ }
+
+ return SuggestionList(name, possibleFieldNames)
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
new file mode 100644
index 00000000..a4a48246
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
@@ -0,0 +1,39 @@
+package validator
+
+import (
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("FragmentsOnCompositeTypes", func(observers *Events, addError AddErrFunc) {
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ fragmentType := walker.Schema.Types[inlineFragment.TypeCondition]
+ if fragmentType == nil || fragmentType.IsCompositeType() {
+ return
+ }
+
+ message := fmt.Sprintf(`Fragment cannot condition on non composite type "%s".`, inlineFragment.TypeCondition)
+
+ addError(
+ Message(message),
+ At(inlineFragment.Position),
+ )
+ })
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() {
+ return
+ }
+
+ message := fmt.Sprintf(`Fragment "%s" cannot condition on non composite type "%s".`, fragment.Name, fragment.TypeCondition)
+
+ addError(
+ Message(message),
+ At(fragment.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
new file mode 100644
index 00000000..83b47387
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
@@ -0,0 +1,57 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) {
+ // A GraphQL field is only valid if all supplied arguments are defined by that field.
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if field.Definition == nil {
+ return
+ }
+ for _, arg := range field.Arguments {
+ def := field.Definition.Arguments.ForName(arg.Name)
+ if def != nil {
+ continue
+ }
+
+ var suggestions []string
+ for _, argDef := range field.Definition.Arguments {
+ suggestions = append(suggestions, argDef.Name)
+ }
+
+ addError(
+ Message(`Unknown argument "%s" on field "%s" of type "%s".`, arg.Name, field.Name, field.ObjectDefinition.Name),
+ SuggestListQuoted("Did you mean", arg.Name, suggestions),
+ At(field.Position),
+ )
+ }
+ })
+
+ observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+ if directive.Definition == nil {
+ return
+ }
+ for _, arg := range directive.Arguments {
+ def := directive.Definition.Arguments.ForName(arg.Name)
+ if def != nil {
+ continue
+ }
+
+ var suggestions []string
+ for _, argDef := range directive.Definition.Arguments {
+ suggestions = append(suggestions, argDef.Name)
+ }
+
+ addError(
+ Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name),
+ SuggestListQuoted("Did you mean", arg.Name, suggestions),
+ At(directive.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
new file mode 100644
index 00000000..dc4353ef
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
@@ -0,0 +1,31 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownDirectives", func(observers *Events, addError AddErrFunc) {
+ observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+ if directive.Definition == nil {
+ addError(
+ Message(`Unknown directive "%s".`, directive.Name),
+ At(directive.Position),
+ )
+ return
+ }
+
+ for _, loc := range directive.Definition.Locations {
+ if loc == directive.Location {
+ return
+ }
+ }
+
+ addError(
+ Message(`Directive "%s" may not be used on %s.`, directive.Name, directive.Location),
+ At(directive.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
new file mode 100644
index 00000000..ec91588c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
@@ -0,0 +1,19 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+ if fragmentSpread.Definition == nil {
+ addError(
+ Message(`Unknown fragment "%s".`, fragmentSpread.Name),
+ At(fragmentSpread.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
new file mode 100644
index 00000000..223086b3
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
@@ -0,0 +1,61 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownTypeNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ for _, vdef := range operation.VariableDefinitions {
+ typeName := vdef.Type.Name()
+ def := walker.Schema.Types[typeName]
+ if def != nil {
+ continue
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typeName),
+ At(operation.Position),
+ )
+ }
+ })
+
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ typedName := inlineFragment.TypeCondition
+ if typedName == "" {
+ return
+ }
+
+ def := walker.Schema.Types[typedName]
+ if def != nil {
+ return
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typedName),
+ At(inlineFragment.Position),
+ )
+ })
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ typeName := fragment.TypeCondition
+ def := walker.Schema.Types[typeName]
+ if def != nil {
+ return
+ }
+
+ var possibleTypes []string
+ for _, t := range walker.Schema.Types {
+ possibleTypes = append(possibleTypes, t.Name)
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typeName),
+ SuggestListQuoted("Did you mean", typeName, possibleTypes),
+ At(fragment.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go b/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
new file mode 100644
index 00000000..dd232142
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
@@ -0,0 +1,19 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("LoneAnonymousOperation", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ if operation.Name == "" && len(walker.Document.Operations) > 1 {
+ addError(
+ Message(`This anonymous operation must be the only defined operation.`),
+ At(operation.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
new file mode 100644
index 00000000..7511529b
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
@@ -0,0 +1,93 @@
+package validator
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("NoFragmentCycles", func(observers *Events, addError AddErrFunc) {
+ visitedFrags := make(map[string]bool)
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ var spreadPath []*ast.FragmentSpread
+ spreadPathIndexByName := make(map[string]int)
+
+ var recursive func(fragment *ast.FragmentDefinition)
+ recursive = func(fragment *ast.FragmentDefinition) {
+ if visitedFrags[fragment.Name] {
+ return
+ }
+
+ visitedFrags[fragment.Name] = true
+
+ spreadNodes := getFragmentSpreads(fragment.SelectionSet)
+ if len(spreadNodes) == 0 {
+ return
+ }
+ spreadPathIndexByName[fragment.Name] = len(spreadPath)
+
+ for _, spreadNode := range spreadNodes {
+ spreadName := spreadNode.Name
+
+ cycleIndex, ok := spreadPathIndexByName[spreadName]
+
+ spreadPath = append(spreadPath, spreadNode)
+ if !ok {
+ spreadFragment := walker.Document.Fragments.ForName(spreadName)
+ if spreadFragment != nil {
+ recursive(spreadFragment)
+ }
+ } else {
+ cyclePath := spreadPath[cycleIndex : len(spreadPath)-1]
+ var fragmentNames []string
+ for _, fs := range cyclePath {
+ fragmentNames = append(fragmentNames, fs.Name)
+ }
+ var via string
+ if len(fragmentNames) != 0 {
+ via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", "))
+ }
+ addError(
+ Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via),
+ At(spreadNode.Position),
+ )
+ }
+
+ spreadPath = spreadPath[:len(spreadPath)-1]
+ }
+
+ delete(spreadPathIndexByName, fragment.Name)
+ }
+
+ recursive(fragment)
+ })
+ })
+}
+
+func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread {
+ var spreads []*ast.FragmentSpread
+
+ setsToVisit := []ast.SelectionSet{node}
+
+ for len(setsToVisit) != 0 {
+ set := setsToVisit[len(setsToVisit)-1]
+ setsToVisit = setsToVisit[:len(setsToVisit)-1]
+
+ for _, selection := range set {
+ switch selection := selection.(type) {
+ case *ast.FragmentSpread:
+ spreads = append(spreads, selection)
+ case *ast.Field:
+ setsToVisit = append(setsToVisit, selection.SelectionSet)
+ case *ast.InlineFragment:
+ setsToVisit = append(setsToVisit, selection.SelectionSet)
+ }
+ }
+ }
+
+ return spreads
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
new file mode 100644
index 00000000..505206be
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
@@ -0,0 +1,28 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("NoUndefinedVariables", func(observers *Events, addError AddErrFunc) {
+ observers.OnValue(func(walker *Walker, value *ast.Value) {
+ if walker.CurrentOperation == nil || value.Kind != ast.Variable || value.VariableDefinition != nil {
+ return
+ }
+
+ if walker.CurrentOperation.Name != "" {
+ addError(
+ Message(`Variable "%s" is not defined by operation "%s".`, value, walker.CurrentOperation.Name),
+ At(walker.CurrentOperation.Position),
+ )
+ } else {
+ addError(
+ Message(`Variable "%s" is not defined.`, value),
+ At(value.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
new file mode 100644
index 00000000..4aa835f5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) {
+
+ inFragmentDefinition := false
+ fragmentNameUsed := make(map[string]bool)
+
+ observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+ if !inFragmentDefinition {
+ fragmentNameUsed[fragmentSpread.Name] = true
+ }
+ })
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ inFragmentDefinition = true
+ if !fragmentNameUsed[fragment.Name] {
+ addError(
+ Message(`Fragment "%s" is never used.`, fragment.Name),
+ At(fragment.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
new file mode 100644
index 00000000..28cf7736
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ for _, varDef := range operation.VariableDefinitions {
+ if varDef.Used {
+ continue
+ }
+
+ if operation.Name != "" {
+ addError(
+ Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name),
+ At(varDef.Position),
+ )
+ } else {
+ addError(
+ Message(`Variable "$%s" is never used.`, varDef.Variable),
+ At(varDef.Position),
+ )
+ }
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go b/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
new file mode 100644
index 00000000..52eab3a2
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
@@ -0,0 +1,553 @@
+package validator
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+
+ AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) {
+ /**
+ * Algorithm:
+ *
+ * Conflicts occur when two fields exist in a query which will produce the same
+ * response name, but represent differing values, thus creating a conflict.
+ * The algorithm below finds all conflicts via making a series of comparisons
+ * between fields. In order to compare as few fields as possible, this makes
+ * a series of comparisons "within" sets of fields and "between" sets of fields.
+ *
+ * Given any selection set, a collection produces both a set of fields by
+ * also including all inline fragments, as well as a list of fragments
+ * referenced by fragment spreads.
+ *
+ * A) Each selection set represented in the document first compares "within" its
+ * collected set of fields, finding any conflicts between every pair of
+ * overlapping fields.
+ * Note: This is the *only time* that the fields "within" a set are compared
+ * to each other. After this only fields "between" sets are compared.
+ *
+ * B) Also, if any fragment is referenced in a selection set, then a
+ * comparison is made "between" the original set of fields and the
+ * referenced fragment.
+ *
+ * C) Also, if multiple fragments are referenced, then comparisons
+ * are made "between" each referenced fragment.
+ *
+ * D) When comparing "between" a set of fields and a referenced fragment, first
+ * a comparison is made between each field in the original set of fields and
+ * each field in the referenced set of fields.
+ *
+ * E) Also, if any fragment is referenced in the referenced selection set,
+ * then a comparison is made "between" the original set of fields and the
+ * referenced fragment (recursively referring to step D).
+ *
+ * F) When comparing "between" two fragments, first a comparison is made between
+ * each field in the first referenced set of fields and each field in the
+ * second referenced set of fields.
+ *
+ * G) Also, any fragments referenced by the first must be compared to the
+ * second, and any fragments referenced by the second must be compared to the
+ * first (recursively referring to step F).
+ *
+ * H) When comparing two fields, if both have selection sets, then a comparison
+ * is made "between" both selection sets, first comparing the set of fields in
+ * the first selection set with the set of fields in the second.
+ *
+ * I) Also, if any fragment is referenced in either selection set, then a
+ * comparison is made "between" the other set of fields and the
+ * referenced fragment.
+ *
+ * J) Also, if two fragments are referenced in both selection sets, then a
+ * comparison is made "between" the two fragments.
+ *
+ */
+
+ m := &overlappingFieldsCanBeMergedManager{
+ comparedFragmentPairs: pairSet{data: make(map[string]map[string]bool)},
+ }
+
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ m.walker = walker
+ conflicts := m.findConflictsWithinSelectionSet(operation.SelectionSet)
+ for _, conflict := range conflicts {
+ conflict.addFieldsConflictMessage(addError)
+ }
+ })
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if walker.CurrentOperation == nil {
+ // When checking both Operation and Fragment, errors are duplicated when processing FragmentDefinition referenced from Operation
+ return
+ }
+ m.walker = walker
+ conflicts := m.findConflictsWithinSelectionSet(field.SelectionSet)
+ for _, conflict := range conflicts {
+ conflict.addFieldsConflictMessage(addError)
+ }
+ })
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ m.walker = walker
+ conflicts := m.findConflictsWithinSelectionSet(inlineFragment.SelectionSet)
+ for _, conflict := range conflicts {
+ conflict.addFieldsConflictMessage(addError)
+ }
+ })
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ m.walker = walker
+ conflicts := m.findConflictsWithinSelectionSet(fragment.SelectionSet)
+ for _, conflict := range conflicts {
+ conflict.addFieldsConflictMessage(addError)
+ }
+ })
+ })
+}
+
+type pairSet struct {
+ data map[string]map[string]bool
+}
+
+func (pairSet *pairSet) Add(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) {
+ add := func(a *ast.FragmentSpread, b *ast.FragmentSpread) {
+ m := pairSet.data[a.Name]
+ if m == nil {
+ m = make(map[string]bool)
+ pairSet.data[a.Name] = m
+ }
+ m[b.Name] = areMutuallyExclusive
+ }
+ add(a, b)
+ add(b, a)
+}
+
+func (pairSet *pairSet) Has(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) bool {
+ am, ok := pairSet.data[a.Name]
+ if !ok {
+ return false
+ }
+ result, ok := am[b.Name]
+ if !ok {
+ return false
+ }
+
+ // areMutuallyExclusive being false is a superset of being true,
+ // hence if we want to know if this PairSet "has" these two with no
+ // exclusivity, we have to ensure it was added as such.
+ if !areMutuallyExclusive {
+ return !result
+ }
+
+ return true
+}
+
+type sequentialFieldsMap struct {
+ // We can't use map[string][]*ast.Field because map iteration order is not stable...
+ seq []string
+ data map[string][]*ast.Field
+}
+
+type fieldIterateEntry struct {
+ ResponseName string
+ Fields []*ast.Field
+}
+
+func (m *sequentialFieldsMap) Push(responseName string, field *ast.Field) {
+ fields, ok := m.data[responseName]
+ if !ok {
+ m.seq = append(m.seq, responseName)
+ }
+ fields = append(fields, field)
+ m.data[responseName] = fields
+}
+
+func (m *sequentialFieldsMap) Get(responseName string) ([]*ast.Field, bool) {
+ fields, ok := m.data[responseName]
+ return fields, ok
+}
+
+func (m *sequentialFieldsMap) Iterator() [][]*ast.Field {
+ fieldsList := make([][]*ast.Field, 0, len(m.seq))
+ for _, responseName := range m.seq {
+ fields := m.data[responseName]
+ fieldsList = append(fieldsList, fields)
+ }
+ return fieldsList
+}
+
+func (m *sequentialFieldsMap) KeyValueIterator() []*fieldIterateEntry {
+ fieldEntriesList := make([]*fieldIterateEntry, 0, len(m.seq))
+ for _, responseName := range m.seq {
+ fields := m.data[responseName]
+ fieldEntriesList = append(fieldEntriesList, &fieldIterateEntry{
+ ResponseName: responseName,
+ Fields: fields,
+ })
+ }
+ return fieldEntriesList
+}
+
+type conflictMessageContainer struct {
+ Conflicts []*ConflictMessage
+}
+
+type ConflictMessage struct {
+ Message string
+ ResponseName string
+ Names []string
+ SubMessage []*ConflictMessage
+ Position *ast.Position
+}
+
+func (m *ConflictMessage) String(buf *bytes.Buffer) {
+ if len(m.SubMessage) == 0 {
+ buf.WriteString(m.Message)
+ return
+ }
+
+ for idx, subMessage := range m.SubMessage {
+ buf.WriteString(`subfields "`)
+ buf.WriteString(subMessage.ResponseName)
+ buf.WriteString(`" conflict because `)
+ subMessage.String(buf)
+ if idx != len(m.SubMessage)-1 {
+ buf.WriteString(" and ")
+ }
+ }
+}
+
+func (m *ConflictMessage) addFieldsConflictMessage(addError AddErrFunc) {
+ var buf bytes.Buffer
+ m.String(&buf)
+ addError(
+ Message(`Fields "%s" conflict because %s. Use different aliases on the fields to fetch both if this was intentional.`, m.ResponseName, buf.String()),
+ At(m.Position),
+ )
+}
+
+type overlappingFieldsCanBeMergedManager struct {
+ walker *Walker
+
+ // per walker
+ comparedFragmentPairs pairSet
+ // cachedFieldsAndFragmentNames interface{}
+
+ // per selectionSet
+ comparedFragments map[string]bool
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflictsWithinSelectionSet(selectionSet ast.SelectionSet) []*ConflictMessage {
+ if len(selectionSet) == 0 {
+ return nil
+ }
+
+ fieldsMap, fragmentSpreads := getFieldsAndFragmentNames(selectionSet)
+
+ var conflicts conflictMessageContainer
+
+ // (A) Find all conflicts "within" the fieldMap of this selection set.
+ // Note: this is the *only place* `collectConflictsWithin` is called.
+ m.collectConflictsWithin(&conflicts, fieldsMap)
+
+ m.comparedFragments = make(map[string]bool)
+ for idx, fragmentSpreadA := range fragmentSpreads {
+ // (B) Then collect conflicts between these fieldMap and those represented by
+ // each spread fragment name found.
+ m.collectConflictsBetweenFieldsAndFragment(&conflicts, false, fieldsMap, fragmentSpreadA)
+
+ for _, fragmentSpreadB := range fragmentSpreads[idx+1:] {
+ // (C) Then compare this fragment with all other fragments found in this
+ // selection set to collect conflicts between fragments spread together.
+ // This compares each item in the list of fragment names to every other
+ // item in that same list (except for itself).
+ m.collectConflictsBetweenFragments(&conflicts, false, fragmentSpreadA, fragmentSpreadB)
+ }
+ }
+
+ return conflicts.Conflicts
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFragment(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fieldsMap *sequentialFieldsMap, fragmentSpread *ast.FragmentSpread) {
+ if m.comparedFragments[fragmentSpread.Name] {
+ return
+ }
+ m.comparedFragments[fragmentSpread.Name] = true
+
+ if fragmentSpread.Definition == nil {
+ return
+ }
+
+ fieldsMapB, fragmentSpreads := getFieldsAndFragmentNames(fragmentSpread.Definition.SelectionSet)
+
+ // Do not compare a fragment's fieldMap to itself.
+ if reflect.DeepEqual(fieldsMap, fieldsMapB) {
+ return
+ }
+
+ // (D) First collect any conflicts between the provided collection of fields
+ // and the collection of fields represented by the given fragment.
+ m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMap, fieldsMapB)
+
+ // (E) Then collect any conflicts between the provided collection of fields
+ // and any fragment names found in the given fragment.
+ for _, fragmentSpread := range fragmentSpreads {
+ m.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsMap, fragmentSpread)
+ }
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
+
+ var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread)
+ check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
+
+ if fragmentSpreadA.Name == fragmentSpreadB.Name {
+ return
+ }
+
+ if m.comparedFragmentPairs.Has(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive) {
+ return
+ }
+ m.comparedFragmentPairs.Add(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive)
+
+ if fragmentSpreadA.Definition == nil {
+ return
+ }
+ if fragmentSpreadB.Definition == nil {
+ return
+ }
+
+ fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(fragmentSpreadA.Definition.SelectionSet)
+ fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(fragmentSpreadB.Definition.SelectionSet)
+
+ // (F) First, collect all conflicts between these two collections of fields
+ // (not including any nested fragments).
+ m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
+
+ // (G) Then collect conflicts between the first fragment and any nested
+ // fragments spread in the second fragment.
+ for _, fragmentSpread := range fragmentSpreadsB {
+ check(fragmentSpreadA, fragmentSpread)
+ }
+ // (G) Then collect conflicts between the second fragment and any nested
+ // fragments spread in the first fragment.
+ for _, fragmentSpread := range fragmentSpreadsA {
+ check(fragmentSpread, fragmentSpreadB)
+ }
+ }
+
+ check(fragmentSpreadA, fragmentSpreadB)
+}
+
+// findConflictsBetweenSubSelectionSets collects every merge conflict between
+// two sub-selection sets, including conflicts reachable through the named
+// fragment spreads contained in either set. It returns nil when the two
+// selection sets can be merged cleanly.
+func (m *overlappingFieldsCanBeMergedManager) findConflictsBetweenSubSelectionSets(areMutuallyExclusive bool, selectionSetA ast.SelectionSet, selectionSetB ast.SelectionSet) *conflictMessageContainer {
+	var conflicts conflictMessageContainer
+
+	fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(selectionSetA)
+	fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(selectionSetB)
+
+	// (H) First, collect all conflicts between these two collections of field.
+	m.collectConflictsBetween(&conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
+
+	// (I) Then collect conflicts between the first collection of fields and
+	// those referenced by each fragment name associated with the second.
+	for _, fragmentSpread := range fragmentSpreadsB {
+		// comparedFragments is reset per spread so each fragment subtree is
+		// fully revisited for this particular field collection.
+		m.comparedFragments = make(map[string]bool)
+		m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapA, fragmentSpread)
+	}
+
+	// (I) Then collect conflicts between the second collection of fields and
+	// those referenced by each fragment name associated with the first.
+	for _, fragmentSpread := range fragmentSpreadsA {
+		m.comparedFragments = make(map[string]bool)
+		m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapB, fragmentSpread)
+	}
+
+	// (J) Also collect conflicts between any fragment names by the first and
+	// fragment names by the second. This compares each item in the first set of
+	// names to each item in the second set of names.
+	for _, fragmentSpreadA := range fragmentSpreadsA {
+		for _, fragmentSpreadB := range fragmentSpreadsB {
+			m.collectConflictsBetweenFragments(&conflicts, areMutuallyExclusive, fragmentSpreadA, fragmentSpreadB)
+		}
+	}
+
+	// nil (not an empty container) signals "no conflicts" to the caller.
+	if len(conflicts.Conflicts) == 0 {
+		return nil
+	}
+
+	return &conflicts
+}
+
+// collectConflictsWithin finds conflicts among the fields of a single
+// collection: for each response name, every unordered pair of fields that
+// produce it is compared exactly once.
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsWithin(conflicts *conflictMessageContainer, fieldsMap *sequentialFieldsMap) {
+	for _, fields := range fieldsMap.Iterator() {
+		// All fields in this slice share one response name; idx+1 avoids
+		// comparing a pair twice or a field with itself.
+		for idx, fieldA := range fields {
+			for _, fieldB := range fields[idx+1:] {
+				conflict := m.findConflict(false, fieldA, fieldB)
+				if conflict != nil {
+					conflicts.Conflicts = append(conflicts.Conflicts, conflict)
+				}
+			}
+		}
+	}
+}
+
+// collectConflictsBetween finds conflicts between two distinct field
+// collections: each field in A is compared against every field in B that
+// answers to the same response name.
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetween(conflicts *conflictMessageContainer, parentFieldsAreMutuallyExclusive bool, fieldsMapA *sequentialFieldsMap, fieldsMapB *sequentialFieldsMap) {
+	for _, fieldsEntryA := range fieldsMapA.KeyValueIterator() {
+		fieldsB, ok := fieldsMapB.Get(fieldsEntryA.ResponseName)
+		if !ok {
+			// No field in B uses this response name, so nothing can collide.
+			continue
+		}
+		for _, fieldA := range fieldsEntryA.Fields {
+			for _, fieldB := range fieldsB {
+				conflict := m.findConflict(parentFieldsAreMutuallyExclusive, fieldA, fieldB)
+				if conflict != nil {
+					conflicts.Conflicts = append(conflicts.Conflicts, conflict)
+				}
+			}
+		}
+	}
+}
+
+// findConflict reports why fieldA and fieldB — which share a response name —
+// cannot be merged into one response entry, or nil when they can.
+// parentFieldsAreMutuallyExclusive means the two parent selections can never
+// apply to the same concrete object, which relaxes the name/argument checks.
+func (m *overlappingFieldsCanBeMergedManager) findConflict(parentFieldsAreMutuallyExclusive bool, fieldA *ast.Field, fieldB *ast.Field) *ConflictMessage {
+	// Fields or parent types missing from the schema cannot be merge-checked.
+	if fieldA.Definition == nil || fieldA.ObjectDefinition == nil || fieldB.Definition == nil || fieldB.ObjectDefinition == nil {
+		return nil
+	}
+
+	// Two fields selected from two *different concrete object* types are
+	// mutually exclusive: at runtime only one of them can be resolved.
+	areMutuallyExclusive := parentFieldsAreMutuallyExclusive
+	if !areMutuallyExclusive {
+		tmp := fieldA.ObjectDefinition.Name != fieldB.ObjectDefinition.Name
+		tmp = tmp && fieldA.ObjectDefinition.Kind == ast.Object
+		tmp = tmp && fieldB.ObjectDefinition.Kind == ast.Object
+		areMutuallyExclusive = tmp
+	}
+
+	// Conflicts are reported under the response name (alias when present).
+	fieldNameA := fieldA.Name
+	if fieldA.Alias != "" {
+		fieldNameA = fieldA.Alias
+	}
+
+	if !areMutuallyExclusive {
+		// Two aliases must refer to the same field.
+		if fieldA.Name != fieldB.Name {
+			return &ConflictMessage{
+				ResponseName: fieldNameA,
+				Message:      fmt.Sprintf(`%s and %s are different fields`, fieldA.Name, fieldB.Name),
+				Position:     fieldB.Position,
+			}
+		}
+
+		// Two field calls must have the same arguments.
+		if !sameArguments(fieldA.Arguments, fieldB.Arguments) {
+			return &ConflictMessage{
+				ResponseName: fieldNameA,
+				Message:      "they have differing arguments",
+				Position:     fieldB.Position,
+			}
+		}
+	}
+
+	// Even mutually exclusive fields must agree on the shape of the value
+	// they return (list-ness, nullability, leaf type).
+	if doTypesConflict(m.walker, fieldA.Definition.Type, fieldB.Definition.Type) {
+		return &ConflictMessage{
+			ResponseName: fieldNameA,
+			Message:      fmt.Sprintf(`they return conflicting types %s and %s`, fieldA.Definition.Type.String(), fieldB.Definition.Type.String()),
+			Position:     fieldB.Position,
+		}
+	}
+
+	// Collect and compare sub-fields. Use the same "visited fragment names" list
+	// for both collections so fields in a fragment reference are never
+	// compared to themselves.
+	conflicts := m.findConflictsBetweenSubSelectionSets(areMutuallyExclusive, fieldA.SelectionSet, fieldB.SelectionSet)
+	if conflicts == nil {
+		return nil
+	}
+	// Sub-selection conflicts are nested under this field's response name.
+	return &ConflictMessage{
+		ResponseName: fieldNameA,
+		SubMessage:   conflicts.Conflicts,
+		Position:     fieldB.Position,
+	}
+}
+
+func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool {
+ if len(args1) != len(args2) {
+ return false
+ }
+ for _, arg1 := range args1 {
+ for _, arg2 := range args2 {
+ if arg1.Name != arg2.Name {
+ return false
+ }
+ if !sameValue(arg1.Value, arg2.Value) {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+// sameValue reports whether two literal values are identical, i.e. they have
+// the same kind and the same raw source text.
+func sameValue(value1 *ast.Value, value2 *ast.Value) bool {
+	return value1.Kind == value2.Kind && value1.Raw == value2.Raw
+}
+
+// doTypesConflict reports whether type1 and type2 can never both be returned
+// for one response name: list vs. non-list, differing nullability, or two
+// different leaf (scalar/enum) types. Two differing composite types do not
+// conflict here — their sub-selections are compared by the caller instead.
+func doTypesConflict(walker *Walker, type1 *ast.Type, type2 *ast.Type) bool {
+	if type1.Elem != nil {
+		if type2.Elem != nil {
+			// Both are lists; the element types decide.
+			return doTypesConflict(walker, type1.Elem, type2.Elem)
+		}
+		// List vs. non-list always conflicts.
+		return true
+	}
+	if type2.Elem != nil {
+		return true
+	}
+	// Nullability must match in both directions.
+	if type1.NonNull && !type2.NonNull {
+		return true
+	}
+	if !type1.NonNull && type2.NonNull {
+		return true
+	}
+
+	// NOTE(review): these lookups assume both named types exist in the schema
+	// (t1/t2 would be nil otherwise) — presumably guaranteed by schema
+	// validation before this rule runs; confirm.
+	t1 := walker.Schema.Types[type1.NamedType]
+	t2 := walker.Schema.Types[type2.NamedType]
+	if (t1.Kind == ast.Scalar || t1.Kind == ast.Enum) && (t2.Kind == ast.Scalar || t2.Kind == ast.Enum) {
+		// Two leaf types conflict unless they are the very same named type.
+		return t1.Name != t2.Name
+	}
+
+	return false
+}
+
+// getFieldsAndFragmentNames flattens a selection set into (a) a map from
+// response name (the alias, or the field name when unaliased) to the fields
+// producing it, descending through inline fragments, and (b) the list of
+// named fragment spreads encountered, which are not descended into here.
+func getFieldsAndFragmentNames(selectionSet ast.SelectionSet) (*sequentialFieldsMap, []*ast.FragmentSpread) {
+	fieldsMap := sequentialFieldsMap{
+		data: make(map[string][]*ast.Field),
+	}
+	var fragmentSpreads []*ast.FragmentSpread
+
+	var walk func(selectionSet ast.SelectionSet)
+	walk = func(selectionSet ast.SelectionSet) {
+		for _, selection := range selectionSet {
+			switch selection := selection.(type) {
+			case *ast.Field:
+				responseName := selection.Name
+				if selection.Alias != "" {
+					responseName = selection.Alias
+				}
+				fieldsMap.Push(responseName, selection)
+
+			case *ast.InlineFragment:
+				// Inline fragment fields contribute to the same response object.
+				walk(selection.SelectionSet)
+
+			case *ast.FragmentSpread:
+				// Named spreads are only recorded; the caller compares them.
+				fragmentSpreads = append(fragmentSpreads, selection)
+			}
+		}
+	}
+	walk(selectionSet)
+
+	return &fieldsMap, fragmentSpreads
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go b/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
new file mode 100644
index 00000000..971decbf
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
@@ -0,0 +1,68 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// PossibleFragmentSpreads checks that every fragment spread (inline or named)
+// could apply to at least one runtime type of its parent: the possible types
+// of the fragment's type condition must intersect those of the parent type.
+func init() {
+	AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) {
+
+		// validate calls emitError when the type named fragmentName shares no
+		// possible runtime type with parentDef.
+		validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) {
+			if parentDef == nil {
+				return
+			}
+
+			var parentDefs []*ast.Definition
+			switch parentDef.Kind {
+			case ast.Object:
+				// A concrete object is its own single possible type.
+				parentDefs = []*ast.Definition{parentDef}
+			case ast.Interface, ast.Union:
+				parentDefs = walker.Schema.GetPossibleTypes(parentDef)
+			default:
+				panic("unexpected type")
+			}
+
+			fragmentDefType := walker.Schema.Types[fragmentName]
+			if fragmentDefType == nil {
+				// Unknown type name; nothing to compare against here.
+				return
+			}
+			if !fragmentDefType.IsCompositeType() {
+				// checked by FragmentsOnCompositeTypes
+				return
+			}
+			fragmentDefs := walker.Schema.GetPossibleTypes(fragmentDefType)
+
+			// Any overlap between the two possible-type sets makes the spread
+			// legal, so return without error on the first match.
+			for _, fragmentDef := range fragmentDefs {
+				for _, parentDef := range parentDefs {
+					if parentDef.Name == fragmentDef.Name {
+						return
+					}
+				}
+			}
+
+			emitError()
+		}
+
+		observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+			validate(walker, inlineFragment.ObjectDefinition, inlineFragment.TypeCondition, func() {
+				addError(
+					Message(`Fragment cannot be spread here as objects of type "%s" can never be of type "%s".`, inlineFragment.ObjectDefinition.Name, inlineFragment.TypeCondition),
+					At(inlineFragment.Position),
+				)
+			})
+		})
+
+		observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+			// Spreads of undefined fragments cannot be type-checked.
+			if fragmentSpread.Definition == nil {
+				return
+			}
+			validate(walker, fragmentSpread.ObjectDefinition, fragmentSpread.Definition.TypeCondition, func() {
+				addError(
+					Message(`Fragment "%s" cannot be spread here as objects of type "%s" can never be of type "%s".`, fragmentSpread.Name, fragmentSpread.ObjectDefinition.Name, fragmentSpread.Definition.TypeCondition),
+					At(fragmentSpread.Position),
+				)
+			})
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go b/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
new file mode 100644
index 00000000..55791a6b
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
@@ -0,0 +1,63 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// ProvidedRequiredArguments checks that every required argument — non-null
+// type and no default value — is supplied on each field and directive usage.
+func init() {
+	AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) {
+
+		observers.OnField(func(walker *Walker, field *ast.Field) {
+			if field.Definition == nil {
+				// No schema definition to read argument requirements from.
+				return
+			}
+
+		argDef:
+			for _, argDef := range field.Definition.Arguments {
+				if !argDef.Type.NonNull {
+					// Nullable arguments are optional.
+					continue
+				}
+				if argDef.DefaultValue != nil {
+					// A default value makes the argument optional.
+					continue
+				}
+				// Required: an argument of this name must appear on the field.
+				for _, arg := range field.Arguments {
+					if arg.Name == argDef.Name {
+						continue argDef
+					}
+				}
+
+				addError(
+					Message(`Field "%s" argument "%s" of type "%s" is required but not provided.`, field.Name, argDef.Name, argDef.Type.String()),
+					At(field.Position),
+				)
+			}
+		})
+
+		observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+			if directive.Definition == nil {
+				// Unknown directive; requirements cannot be checked.
+				return
+			}
+
+		argDef:
+			for _, argDef := range directive.Definition.Arguments {
+				if !argDef.Type.NonNull {
+					continue
+				}
+				if argDef.DefaultValue != nil {
+					continue
+				}
+				for _, arg := range directive.Arguments {
+					if arg.Name == argDef.Name {
+						continue argDef
+					}
+				}
+
+				addError(
+					Message(`Directive "@%s" argument "%s" of type "%s" is required but not provided.`, directive.Definition.Name, argDef.Name, argDef.Type.String()),
+					At(directive.Position),
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go b/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
new file mode 100644
index 00000000..bb961f44
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
@@ -0,0 +1,36 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// ScalarLeafs checks selection shape against field type: leaf types must not
+// carry a sub-selection, and non-leaf (composite) types must carry one.
+func init() {
+	AddRule("ScalarLeafs", func(observers *Events, addError AddErrFunc) {
+		observers.OnField(func(walker *Walker, field *ast.Field) {
+			if field.Definition == nil {
+				// No schema definition for this field; nothing to check.
+				return
+			}
+
+			fieldType := walker.Schema.Types[field.Definition.Type.Name()]
+			if fieldType == nil {
+				// The field's named type is not in the schema.
+				return
+			}
+
+			if fieldType.IsLeafType() && len(field.SelectionSet) > 0 {
+				addError(
+					Message(`Field "%s" must not have a selection since type "%s" has no subfields.`, field.Name, fieldType.Name),
+					At(field.Position),
+				)
+			}
+
+			if !fieldType.IsLeafType() && len(field.SelectionSet) == 0 {
+				addError(
+					Message(`Field "%s" of type "%s" must have a selection of subfields.`, field.Name, field.Definition.Type.String()),
+					Suggestf(`"%s { ... }"`, field.Name),
+					At(field.Position),
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go b/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
new file mode 100644
index 00000000..53003c11
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// SingleFieldSubscriptions checks that a subscription operation selects
+// exactly one top-level field.
+func init() {
+	AddRule("SingleFieldSubscriptions", func(observers *Events, addError AddErrFunc) {
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			if operation.Operation != ast.Subscription {
+				return
+			}
+
+			if len(operation.SelectionSet) > 1 {
+				name := "Anonymous Subscription"
+				if operation.Name != "" {
+					name = `Subscription ` + strconv.Quote(operation.Name)
+				}
+
+				// Point the error at the second selection — the first excess field.
+				addError(
+					Message(`%s must select only one top level field.`, name),
+					At(operation.SelectionSet[1].GetPosition()),
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
new file mode 100644
index 00000000..0ddcde72
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
@@ -0,0 +1,33 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// UniqueArgumentNames checks that no field or directive usage repeats an
+// argument name; the shared check lives in checkUniqueArgs.
+func init() {
+	AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) {
+		observers.OnField(func(walker *Walker, field *ast.Field) {
+			checkUniqueArgs(field.Arguments, addError)
+		})
+
+		observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+			checkUniqueArgs(directive.Arguments, addError)
+		})
+	})
+}
+
+// checkUniqueArgs reports, via addError, every argument in args whose name
+// has already appeared earlier in the list.
+func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) {
+	seen := make(map[string]bool, len(args))
+
+	for _, arg := range args {
+		if !seen[arg.Name] {
+			// First occurrence: remember it and move on.
+			seen[arg.Name] = true
+			continue
+		}
+		addError(
+			Message(`There can be only one argument named "%s".`, arg.Name),
+			At(arg.Position),
+		)
+	}
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
new file mode 100644
index 00000000..077c4687
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
@@ -0,0 +1,24 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// UniqueDirectivesPerLocation checks that a directive name appears at most
+// once within any single directive list (one location in the document).
+func init() {
+	AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) {
+		observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) {
+			// seen is scoped per directive list, so the same directive may
+			// still be used at different locations.
+			seen := map[string]bool{}
+
+			for _, dir := range directives {
+				if seen[dir.Name] {
+					addError(
+						Message(`The directive "%s" can only be used once at this location.`, dir.Name),
+						At(dir.Position),
+					)
+				}
+				seen[dir.Name] = true
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
new file mode 100644
index 00000000..46a8b7c7
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
@@ -0,0 +1,22 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// UniqueFragmentNames checks that every fragment in the document has a
+// distinct name.
+func init() {
+	AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) {
+		// Declared in the rule closure, so it accumulates across the whole
+		// document walk.
+		seenFragments := map[string]bool{}
+
+		observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+			if seenFragments[fragment.Name] {
+				addError(
+					Message(`There can be only one fragment named "%s".`, fragment.Name),
+					At(fragment.Position),
+				)
+			}
+			seenFragments[fragment.Name] = true
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
new file mode 100644
index 00000000..f254d588
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
@@ -0,0 +1,27 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// UniqueInputFieldNames checks that an input object literal does not repeat
+// a field name.
+func init() {
+	AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) {
+		observers.OnValue(func(walker *Walker, value *ast.Value) {
+			if value.Kind != ast.ObjectValue {
+				return
+			}
+
+			// seen is per object literal; nested objects get their own
+			// OnValue callback.
+			seen := map[string]bool{}
+			for _, field := range value.Children {
+				if seen[field.Name] {
+					addError(
+						Message(`There can be only one input field named "%s".`, field.Name),
+						At(field.Position),
+					)
+				}
+				seen[field.Name] = true
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
new file mode 100644
index 00000000..c1ab56be
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
@@ -0,0 +1,22 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// UniqueOperationNames checks that every operation in the document has a
+// distinct name.
+func init() {
+	AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) {
+		// Accumulates operation names across the whole document walk.
+		seen := map[string]bool{}
+
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			if seen[operation.Name] {
+				addError(
+					Message(`There can be only one operation named "%s".`, operation.Name),
+					At(operation.Position),
+				)
+			}
+			seen[operation.Name] = true
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
new file mode 100644
index 00000000..70590a88
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
@@ -0,0 +1,23 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// UniqueVariableNames checks that an operation does not declare the same
+// variable name twice.
+func init() {
+	AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) {
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			// seen is per operation; different operations may reuse names.
+			seen := map[string]bool{}
+			for _, def := range operation.VariableDefinitions {
+				if seen[def.Variable] {
+					addError(
+						Message(`There can be only one variable named "%s".`, def.Variable),
+						At(def.Position),
+					)
+				}
+				seen[def.Variable] = true
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
new file mode 100644
index 00000000..d64cc666
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
@@ -0,0 +1,130 @@
+package validator
+
+import (
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// ValuesOfCorrectType checks every literal value in the document against the
+// type the schema expects at that position: kind vs. built-in scalar,
+// enum membership, and input-object field presence/spelling.
+func init() {
+	AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) {
+		observers.OnValue(func(walker *Walker, value *ast.Value) {
+			if value.Definition == nil || value.ExpectedType == nil {
+				// No resolved type to validate against.
+				return
+			}
+
+			if value.Definition.Kind == ast.Scalar {
+				// Skip custom validating scalars
+				if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") {
+					return
+				}
+			}
+
+			// Enum value names, reused for "did you mean" suggestions below.
+			var possibleEnums []string
+			if value.Definition.Kind == ast.Enum {
+				for _, val := range value.Definition.EnumValues {
+					possibleEnums = append(possibleEnums, val.Name)
+				}
+			}
+
+			// On conversion failure an error is reported here but execution
+			// deliberately continues; rawVal may then be the zero value below.
+			rawVal, err := value.Value(nil)
+			if err != nil {
+				unexpectedTypeMessage(addError, value)
+			}
+
+			switch value.Kind {
+			case ast.NullValue:
+				if value.ExpectedType.NonNull {
+					unexpectedTypeMessage(addError, value)
+				}
+
+			case ast.ListValue:
+				if value.ExpectedType.Elem == nil {
+					// A list literal where a non-list type is expected.
+					unexpectedTypeMessage(addError, value)
+					return
+				}
+
+			case ast.IntValue:
+				// Integer literals are accepted for Int, Float and ID.
+				if !value.Definition.OneOf("Int", "Float", "ID") {
+					unexpectedTypeMessage(addError, value)
+				}
+
+			case ast.FloatValue:
+				if !value.Definition.OneOf("Float") {
+					unexpectedTypeMessage(addError, value)
+				}
+
+			case ast.StringValue, ast.BlockValue:
+				if value.Definition.Kind == ast.Enum {
+					// A quoted string where an enum value is expected —
+					// suggest the unquoted enum names.
+					rawValStr := fmt.Sprint(rawVal)
+					addError(
+						Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
+						SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
+						At(value.Position),
+					)
+				} else if !value.Definition.OneOf("String", "ID") {
+					unexpectedTypeMessage(addError, value)
+				}
+
+			case ast.EnumValue:
+				if value.Definition.Kind != ast.Enum || value.Definition.EnumValues.ForName(value.Raw) == nil {
+					rawValStr := fmt.Sprint(rawVal)
+					addError(
+						Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
+						SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
+						At(value.Position),
+					)
+				}
+
+			case ast.BooleanValue:
+				if !value.Definition.OneOf("Boolean") {
+					unexpectedTypeMessage(addError, value)
+				}
+
+			case ast.ObjectValue:
+
+				// Every required (non-null, no default) input field must be present.
+				for _, field := range value.Definition.Fields {
+					if field.Type.NonNull {
+						fieldValue := value.Children.ForName(field.Name)
+						if fieldValue == nil && field.DefaultValue == nil {
+							addError(
+								Message("Field %s.%s of required type %s was not provided.", value.Definition.Name, field.Name, field.Type.String()),
+								At(value.Position),
+							)
+							continue
+						}
+					}
+				}
+
+				// Every provided field must exist on the input type; suggest
+				// the defined field names otherwise.
+				for _, fieldValue := range value.Children {
+					if value.Definition.Fields.ForName(fieldValue.Name) == nil {
+						var suggestions []string
+						for _, fieldValue := range value.Definition.Fields {
+							suggestions = append(suggestions, fieldValue.Name)
+						}
+
+						addError(
+							Message(`Field "%s" is not defined by type %s.`, fieldValue.Name, value.Definition.Name),
+							SuggestListUnquoted("Did you mean", fieldValue.Name, suggestions),
+							At(fieldValue.Position),
+						)
+					}
+				}
+
+			case ast.Variable:
+				// Variable usages are checked by VariablesInAllowedPosition.
+				return
+
+			default:
+				panic(fmt.Errorf("unhandled %T", value))
+			}
+		})
+	})
+}
+
+// unexpectedTypeMessage reports the generic "Expected type X, found Y" error
+// for a value that does not match its expected type.
+func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) {
+	msg := Message("Expected type %s, found %s.", v.ExpectedType.String(), v.String())
+	addError(msg, At(v.Position))
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
new file mode 100644
index 00000000..9d58ae1c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
@@ -0,0 +1,28 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// VariablesAreInputTypes checks that every declared variable has an input
+// type (not an object, interface or union).
+func init() {
+	AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) {
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			for _, def := range operation.VariableDefinitions {
+				if def.Definition == nil {
+					// The variable's named type is not in the schema.
+					continue
+				}
+				if !def.Definition.IsInputType() {
+					addError(
+						Message(
+							`Variable "$%s" cannot be non-input type "%s".`,
+							def.Variable,
+							def.Type.String(),
+						),
+						At(def.Position),
+					)
+				}
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
new file mode 100644
index 00000000..e6d97c9f
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
@@ -0,0 +1,36 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+// VariablesInAllowedPosition checks that each variable usage has a declared
+// type compatible with the type expected at that position.
+func init() {
+	AddRule("VariablesInAllowedPosition", func(observers *Events, addError AddErrFunc) {
+		observers.OnValue(func(walker *Walker, value *ast.Value) {
+			if value.Kind != ast.Variable || value.ExpectedType == nil || value.VariableDefinition == nil || walker.CurrentOperation == nil {
+				return
+			}
+
+			// todo: move me into walk
+			// If there is a default non nullable types can be null
+			// NOTE(review): this clears NonNull on the shared ExpectedType AST
+			// node in place — confirm later visits of the same node are not
+			// affected by the mutation.
+			if value.VariableDefinition.DefaultValue != nil && value.VariableDefinition.DefaultValue.Kind != ast.NullValue {
+				if value.ExpectedType.NonNull {
+					value.ExpectedType.NonNull = false
+				}
+			}
+
+			if !value.VariableDefinition.Type.IsCompatible(value.ExpectedType) {
+				addError(
+					Message(
+						`Variable "%s" of type "%s" used in position expecting type "%s".`,
+						value,
+						value.VariableDefinition.Type.String(),
+						value.ExpectedType.String(),
+					),
+					At(value.Position),
+				)
+			}
+		})
+	})
+}