author    Michael Muré <batolettre@gmail.com>  2020-02-05 22:03:19 +0100
committer Michael Muré <batolettre@gmail.com>  2020-02-05 22:33:03 +0100
commit    1d4bb7ceb0cef79d68df0bacc913b01e40e6ddd6 (patch)
tree      e088b0fa43058afde1db71541d8fcb4b94905d6e /vendor/github.com/vektah
parent    f093be96e98284580d61664adecd0a2ff8b354e4 (diff)
download  git-bug-1d4bb7ceb0cef79d68df0bacc913b01e40e6ddd6.tar.gz
migrate to go modules
Diffstat (limited to 'vendor/github.com/vektah')
-rw-r--r--  vendor/github.com/vektah/gqlgen/LICENSE | 19
-rw-r--r--  vendor/github.com/vektah/gqlgen/client/client.go | 141
-rw-r--r--  vendor/github.com/vektah/gqlgen/client/readme.md | 5
-rw-r--r--  vendor/github.com/vektah/gqlgen/client/websocket.go | 103
-rw-r--r--  vendor/github.com/vektah/gqlparser/.gitignore | 5
-rw-r--r--  vendor/github.com/vektah/gqlparser/.gometalinter.json | 13
-rw-r--r--  vendor/github.com/vektah/gqlparser/LICENSE | 19
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/argmap.go | 37
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/collections.go | 138
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/definition.go | 93
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/directive.go | 42
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/document.go | 67
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/dumper.go | 159
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/fragment.go | 38
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/operation.go | 29
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/selection.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/source.go | 15
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/type.go | 68
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/value.go | 120
-rw-r--r--  vendor/github.com/vektah/gqlparser/gqlerror/error.go | 133
-rw-r--r--  vendor/github.com/vektah/gqlparser/gqlparser.go | 42
-rw-r--r--  vendor/github.com/vektah/gqlparser/lexer/blockstring.go | 58
-rw-r--r--  vendor/github.com/vektah/gqlparser/lexer/lexer.go | 510
-rw-r--r--  vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml | 672
-rw-r--r--  vendor/github.com/vektah/gqlparser/lexer/token.go | 148
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/parser.go | 136
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/query.go | 348
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/query_test.yml | 520
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/schema.go | 527
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/schema_test.yml | 540
-rw-r--r--  vendor/github.com/vektah/gqlparser/readme.md | 17
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/error.go | 55
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/imported/LICENSE | 33
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/messaging.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/prelude.go | 9
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/prelude.graphql | 119
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go | 86
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go | 57
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go | 31
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go | 19
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go | 61
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go | 19
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go | 93
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go | 28
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go | 557
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go | 68
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go | 63
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go | 36
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go | 33
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go | 24
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go | 22
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go | 27
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go | 22
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go | 23
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go | 130
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go | 28
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go | 36
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/schema.go | 276
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/schema_test.yml | 323
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/suggestionList.go | 69
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/validator.go | 44
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/vars.go | 199
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/walk.go | 286
67 files changed, 0 insertions, 7845 deletions
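
Some context on the change itself: with the move to Go modules, dependencies such as the vendored gqlparser/gqlgen trees deleted below are declared as versioned requirements in a go.mod file instead of being committed under vendor/. A minimal sketch of the shape such a file takes (module path, Go version, and the dependency version here are illustrative, not taken from this commit):

module github.com/MichaelMure/git-bug

go 1.13

require github.com/vektah/gqlparser v1.2.0 // example version, not verified against this commit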
diff --git a/vendor/github.com/vektah/gqlgen/LICENSE b/vendor/github.com/vektah/gqlgen/LICENSE
deleted file mode 100644
index 18e1b249..00000000
--- a/vendor/github.com/vektah/gqlgen/LICENSE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2018 Adam Scarr
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/vendor/github.com/vektah/gqlgen/client/client.go b/vendor/github.com/vektah/gqlgen/client/client.go
deleted file mode 100644
index 1d482700..00000000
--- a/vendor/github.com/vektah/gqlgen/client/client.go
+++ /dev/null
@@ -1,141 +0,0 @@
-// client is used internally for testing. See readme for alternatives
-package client
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "io/ioutil"
- "net/http"
-
- "github.com/mitchellh/mapstructure"
-)
-
-// Client for graphql requests
-type Client struct {
- url string
- client *http.Client
-}
-
-// New creates a graphql client
-func New(url string, client ...*http.Client) *Client {
- p := &Client{
- url: url,
- }
-
- if len(client) > 0 {
- p.client = client[0]
- } else {
- p.client = http.DefaultClient
- }
- return p
-}
-
-type Request struct {
- Query string `json:"query"`
- Variables map[string]interface{} `json:"variables,omitempty"`
- OperationName string `json:"operationName,omitempty"`
-}
-
-type Option func(r *Request)
-
-func Var(name string, value interface{}) Option {
- return func(r *Request) {
- if r.Variables == nil {
- r.Variables = map[string]interface{}{}
- }
-
- r.Variables[name] = value
- }
-}
-
-func Operation(name string) Option {
- return func(r *Request) {
- r.OperationName = name
- }
-}
-
-func (p *Client) MustPost(query string, response interface{}, options ...Option) {
- if err := p.Post(query, response, options...); err != nil {
- panic(err)
- }
-}
-
-func (p *Client) mkRequest(query string, options ...Option) Request {
- r := Request{
- Query: query,
- }
-
- for _, option := range options {
- option(&r)
- }
-
- return r
-}
-
-func (p *Client) Post(query string, response interface{}, options ...Option) (resperr error) {
- r := p.mkRequest(query, options...)
- requestBody, err := json.Marshal(r)
- if err != nil {
- return fmt.Errorf("encode: %s", err.Error())
- }
-
- rawResponse, err := p.client.Post(p.url, "application/json", bytes.NewBuffer(requestBody))
- if err != nil {
- return fmt.Errorf("post: %s", err.Error())
- }
- defer func() {
- _ = rawResponse.Body.Close()
- }()
-
- if rawResponse.StatusCode >= http.StatusBadRequest {
- responseBody, _ := ioutil.ReadAll(rawResponse.Body)
- return fmt.Errorf("http %d: %s", rawResponse.StatusCode, responseBody)
- }
-
- responseBody, err := ioutil.ReadAll(rawResponse.Body)
- if err != nil {
- return fmt.Errorf("read: %s", err.Error())
- }
-
- // decode it into map string first, let mapstructure do the final decode
- // because it can be much stricter about unknown fields.
- respDataRaw := struct {
- Data interface{}
- Errors json.RawMessage
- }{}
- err = json.Unmarshal(responseBody, &respDataRaw)
- if err != nil {
- return fmt.Errorf("decode: %s", err.Error())
- }
-
- // we want to unpack even if there is an error, so we can see partial responses
- unpackErr := unpack(respDataRaw.Data, response)
-
- if respDataRaw.Errors != nil {
- return RawJsonError{respDataRaw.Errors}
- }
- return unpackErr
-}
-
-type RawJsonError struct {
- json.RawMessage
-}
-
-func (r RawJsonError) Error() string {
- return string(r.RawMessage)
-}
-
-func unpack(data interface{}, into interface{}) error {
- d, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
- Result: into,
- TagName: "json",
- ErrorUnused: true,
- ZeroFields: true,
- })
- if err != nil {
- return fmt.Errorf("mapstructure: %s", err.Error())
- }
-
- return d.Decode(data)
-}
diff --git a/vendor/github.com/vektah/gqlgen/client/readme.md b/vendor/github.com/vektah/gqlgen/client/readme.md
deleted file mode 100644
index 755a1433..00000000
--- a/vendor/github.com/vektah/gqlgen/client/readme.md
+++ /dev/null
@@ -1,5 +0,0 @@
-This client is used internally for testing. I wanted a simple graphql client that sends user-specified queries.
-
-You might want to look at:
- - https://github.com/shurcooL/graphql: Uses reflection to build queries from structs.
- - https://github.com/machinebox/graphql: Probably would have been a perfect fit, but it uses form encoding instead of json...
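
A minimal usage sketch for the client deleted above (the test server, query, and variable are illustrative; in gqlgen's own tests the server would be the generated GraphQL handler):

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/vektah/gqlgen/client"
)

func main() {
	// Stand-in GraphQL endpoint returning a canned response.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, `{"data":{"hello":"world"}}`)
	}))
	defer srv.Close()

	c := client.New(srv.URL)

	var resp struct {
		Hello string
	}
	// Var attaches the $name variable; MustPost panics on transport or GraphQL errors.
	c.MustPost(`query($name: String!) { hello(name: $name) }`, &resp, client.Var("name", "world"))
	fmt.Println(resp.Hello) // world
}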
diff --git a/vendor/github.com/vektah/gqlgen/client/websocket.go b/vendor/github.com/vektah/gqlgen/client/websocket.go
deleted file mode 100644
index bd92e3c0..00000000
--- a/vendor/github.com/vektah/gqlgen/client/websocket.go
+++ /dev/null
@@ -1,103 +0,0 @@
-package client
-
-import (
- "encoding/json"
- "fmt"
- "strings"
-
- "github.com/gorilla/websocket"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-const (
- connectionInitMsg = "connection_init" // Client -> Server
- startMsg = "start" // Client -> Server
- connectionAckMsg = "connection_ack" // Server -> Client
- dataMsg = "data" // Server -> Client
- errorMsg = "error" // Server -> Client
-)
-
-type operationMessage struct {
- Payload json.RawMessage `json:"payload,omitempty"`
- ID string `json:"id,omitempty"`
- Type string `json:"type"`
-}
-
-type Subscription struct {
- Close func() error
- Next func(response interface{}) error
-}
-
-func errorSubscription(err error) *Subscription {
- return &Subscription{
- Close: func() error { return nil },
- Next: func(response interface{}) error {
- return err
- },
- }
-}
-
-func (p *Client) Websocket(query string, options ...Option) *Subscription {
- r := p.mkRequest(query, options...)
- requestBody, err := json.Marshal(r)
- if err != nil {
- return errorSubscription(fmt.Errorf("encode: %s", err.Error()))
- }
-
- url := strings.Replace(p.url, "http://", "ws://", -1)
- url = strings.Replace(url, "https://", "wss://", -1)
-
- c, _, err := websocket.DefaultDialer.Dial(url, nil)
- if err != nil {
- return errorSubscription(fmt.Errorf("dial: %s", err.Error()))
- }
-
- if err = c.WriteJSON(operationMessage{Type: connectionInitMsg}); err != nil {
- return errorSubscription(fmt.Errorf("init: %s", err.Error()))
- }
-
- var ack operationMessage
- if err = c.ReadJSON(&ack); err != nil {
- return errorSubscription(fmt.Errorf("ack: %s", err.Error()))
- }
- if ack.Type != connectionAckMsg {
- return errorSubscription(fmt.Errorf("expected ack message, got %#v", ack))
- }
-
- if err = c.WriteJSON(operationMessage{Type: startMsg, ID: "1", Payload: requestBody}); err != nil {
- return errorSubscription(fmt.Errorf("start: %s", err.Error()))
- }
-
- return &Subscription{
- Close: c.Close,
- Next: func(response interface{}) error {
- var op operationMessage
- c.ReadJSON(&op)
- if op.Type != dataMsg {
- if op.Type == errorMsg {
- return fmt.Errorf(string(op.Payload))
- } else {
- return fmt.Errorf("expected data message, got %#v", op)
- }
- }
-
- respDataRaw := map[string]interface{}{}
- err = json.Unmarshal(op.Payload, &respDataRaw)
- if err != nil {
- return fmt.Errorf("decode: %s", err.Error())
- }
-
- if respDataRaw["errors"] != nil {
- var errs []*gqlerror.Error
- if err = unpack(respDataRaw["errors"], &errs); err != nil {
- return err
- }
- if len(errs) > 0 {
- return fmt.Errorf("errors: %s", errs)
- }
- }
-
- return unpack(respDataRaw["data"], response)
- },
- }
-}
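
A usage sketch for the subscription client above (the endpoint URL and the subscription are hypothetical; the server is expected to speak the connection_init/start/data protocol listed in the constants):

package main

import (
	"fmt"

	"github.com/vektah/gqlgen/client"
)

func main() {
	// Hypothetical GraphQL endpoint; Websocket rewrites http:// to ws:// internally.
	c := client.New("http://localhost:8080/query")

	sub := c.Websocket(`subscription { messageAdded { body } }`)
	defer sub.Close()

	var resp struct {
		MessageAdded struct {
			Body string
		}
	}
	// Each Next call blocks until the server pushes the next data message.
	for i := 0; i < 3; i++ {
		if err := sub.Next(&resp); err != nil {
			panic(err)
		}
		fmt.Println(resp.MessageAdded.Body)
	}
}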
diff --git a/vendor/github.com/vektah/gqlparser/.gitignore b/vendor/github.com/vektah/gqlparser/.gitignore
deleted file mode 100644
index 877392a7..00000000
--- a/vendor/github.com/vektah/gqlparser/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-/vendor
-/validator/imported/node_modules
-/validator/imported/graphql-js
-
-.idea/
diff --git a/vendor/github.com/vektah/gqlparser/.gometalinter.json b/vendor/github.com/vektah/gqlparser/.gometalinter.json
deleted file mode 100644
index e4e00223..00000000
--- a/vendor/github.com/vektah/gqlparser/.gometalinter.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "sort": ["path"],
- "Deadline": "5m",
- "Linters": {
- "errcheck": {
- "Command": "errcheck -abspath -ignore '[rR]ead|[wW]rite|Close'",
- "Pattern": "PATH:LINE:COL:MESSAGE",
- "InstallFrom": "github.com/kisielk/errcheck",
- "PartitionStrategy": "packages"
- }
- },
- "Disable": ["golint","gocyclo", "goconst", "gas", "interfacer", "vet","gosec"]
-}
diff --git a/vendor/github.com/vektah/gqlparser/LICENSE b/vendor/github.com/vektah/gqlparser/LICENSE
deleted file mode 100644
index 1221b9d3..00000000
--- a/vendor/github.com/vektah/gqlparser/LICENSE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2018 Adam Scarr
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/vektah/gqlparser/ast/argmap.go b/vendor/github.com/vektah/gqlparser/ast/argmap.go
deleted file mode 100644
index 43f6a3d6..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/argmap.go
+++ /dev/null
@@ -1,37 +0,0 @@
-package ast
-
-func arg2map(defs ArgumentDefinitionList, args ArgumentList, vars map[string]interface{}) map[string]interface{} {
- result := map[string]interface{}{}
- var err error
-
- for _, argDef := range defs {
- var val interface{}
- var hasValue bool
-
- if argValue := args.ForName(argDef.Name); argValue != nil {
- if argValue.Value.Kind == Variable {
- val, hasValue = vars[argValue.Value.Raw]
- } else {
- val, err = argValue.Value.Value(vars)
- if err != nil {
- panic(err)
- }
- hasValue = true
- }
- }
-
- if !hasValue && argDef.DefaultValue != nil {
- val, err = argDef.DefaultValue.Value(vars)
- if err != nil {
- panic(err)
- }
- hasValue = true
- }
-
- if hasValue {
- result[argDef.Name] = val
- }
- }
-
- return result
-}
diff --git a/vendor/github.com/vektah/gqlparser/ast/collections.go b/vendor/github.com/vektah/gqlparser/ast/collections.go
deleted file mode 100644
index 6bf67297..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/collections.go
+++ /dev/null
@@ -1,138 +0,0 @@
-package ast
-
-type FieldList []*FieldDefinition
-
-func (l FieldList) ForName(name string) *FieldDefinition {
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type EnumValueList []*EnumValueDefinition
-
-func (l EnumValueList) ForName(name string) *EnumValueDefinition {
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type DirectiveList []*Directive
-
-func (l DirectiveList) ForName(name string) *Directive {
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type OperationList []*OperationDefinition
-
-func (l OperationList) ForName(name string) *OperationDefinition {
- if name == "" && len(l) == 1 {
- return l[0]
- }
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type FragmentDefinitionList []*FragmentDefinition
-
-func (l FragmentDefinitionList) ForName(name string) *FragmentDefinition {
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type VariableDefinitionList []*VariableDefinition
-
-func (l VariableDefinitionList) ForName(name string) *VariableDefinition {
- for _, it := range l {
- if it.Variable == name {
- return it
- }
- }
- return nil
-}
-
-type ArgumentList []*Argument
-
-func (l ArgumentList) ForName(name string) *Argument {
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type ArgumentDefinitionList []*ArgumentDefinition
-
-func (l ArgumentDefinitionList) ForName(name string) *ArgumentDefinition {
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type SchemaDefinitionList []*SchemaDefinition
-
-type DirectiveDefinitionList []*DirectiveDefinition
-
-func (l DirectiveDefinitionList) ForName(name string) *DirectiveDefinition {
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type DefinitionList []*Definition
-
-func (l DefinitionList) ForName(name string) *Definition {
- for _, it := range l {
- if it.Name == name {
- return it
- }
- }
- return nil
-}
-
-type OperationTypeDefinitionList []*OperationTypeDefinition
-
-func (l OperationTypeDefinitionList) ForType(name string) *OperationTypeDefinition {
- for _, it := range l {
- if it.Type == name {
- return it
- }
- }
- return nil
-}
-
-type ChildValueList []*ChildValue
-
-func (v ChildValueList) ForName(name string) *Value {
- for _, f := range v {
- if f.Name == name {
- return f.Value
- }
- }
- return nil
-}
diff --git a/vendor/github.com/vektah/gqlparser/ast/definition.go b/vendor/github.com/vektah/gqlparser/ast/definition.go
deleted file mode 100644
index f5c8ea37..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/definition.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package ast
-
-type DefinitionKind string
-
-const (
- Scalar DefinitionKind = "SCALAR"
- Object DefinitionKind = "OBJECT"
- Interface DefinitionKind = "INTERFACE"
- Union DefinitionKind = "UNION"
- Enum DefinitionKind = "ENUM"
- InputObject DefinitionKind = "INPUT_OBJECT"
-)
-
-// ObjectDefinition is the core type definition object, it includes all of the definable types
-// but does *not* cover schema or directives.
-//
-// @vektah: Javascript implementation has different types for all of these, but they are
-// more similar than different and don't define any behaviour. I think this style of
-// "some hot" struct works better, at least for go.
-//
-// Type extensions are also represented by this same struct.
-type Definition struct {
- Kind DefinitionKind
- Description string
- Name string
- Directives DirectiveList
- Interfaces []string // object and input object
- Fields FieldList // object and input object
- Types []string // union
- EnumValues EnumValueList // enum
-
- Position *Position `dump:"-"`
- BuiltIn bool `dump:"-"`
-}
-
-func (d *Definition) IsLeafType() bool {
- return d.Kind == Enum || d.Kind == Scalar
-}
-
-func (d *Definition) IsAbstractType() bool {
- return d.Kind == Interface || d.Kind == Union
-}
-
-func (d *Definition) IsCompositeType() bool {
- return d.Kind == Object || d.Kind == Interface || d.Kind == Union
-}
-
-func (d *Definition) IsInputType() bool {
- return d.Kind == Scalar || d.Kind == Enum || d.Kind == InputObject
-}
-
-func (d *Definition) OneOf(types ...string) bool {
- for _, t := range types {
- if d.Name == t {
- return true
- }
- }
- return false
-}
-
-type FieldDefinition struct {
- Description string
- Name string
- Arguments ArgumentDefinitionList // only for objects
- DefaultValue *Value // only for input objects
- Type *Type
- Directives DirectiveList
- Position *Position `dump:"-"`
-}
-
-type ArgumentDefinition struct {
- Description string
- Name string
- DefaultValue *Value
- Type *Type
- Directives DirectiveList
- Position *Position `dump:"-"`
-}
-
-type EnumValueDefinition struct {
- Description string
- Name string
- Directives DirectiveList
- Position *Position `dump:"-"`
-}
-
-type DirectiveDefinition struct {
- Description string
- Name string
- Arguments ArgumentDefinitionList
- Locations []DirectiveLocation
- Position *Position `dump:"-"`
-}
diff --git a/vendor/github.com/vektah/gqlparser/ast/directive.go b/vendor/github.com/vektah/gqlparser/ast/directive.go
deleted file mode 100644
index 9b07c92a..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/directive.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package ast
-
-type DirectiveLocation string
-
-const (
- // Executable
- LocationQuery DirectiveLocation = `QUERY`
- LocationMutation DirectiveLocation = `MUTATION`
- LocationSubscription DirectiveLocation = `SUBSCRIPTION`
- LocationField DirectiveLocation = `FIELD`
- LocationFragmentDefinition DirectiveLocation = `FRAGMENT_DEFINITION`
- LocationFragmentSpread DirectiveLocation = `FRAGMENT_SPREAD`
- LocationInlineFragment DirectiveLocation = `INLINE_FRAGMENT`
-
- // Type System
- LocationSchema DirectiveLocation = `SCHEMA`
- LocationScalar DirectiveLocation = `SCALAR`
- LocationObject DirectiveLocation = `OBJECT`
- LocationFieldDefinition DirectiveLocation = `FIELD_DEFINITION`
- LocationArgumentDefinition DirectiveLocation = `ARGUMENT_DEFINITION`
- LocationInterface DirectiveLocation = `INTERFACE`
- LocationUnion DirectiveLocation = `UNION`
- LocationEnum DirectiveLocation = `ENUM`
- LocationEnumValue DirectiveLocation = `ENUM_VALUE`
- LocationInputObject DirectiveLocation = `INPUT_OBJECT`
- LocationInputFieldDefinition DirectiveLocation = `INPUT_FIELD_DEFINITION`
-)
-
-type Directive struct {
- Name string
- Arguments ArgumentList
- Position *Position `dump:"-"`
-
- // Requires validation
- ParentDefinition *Definition
- Definition *DirectiveDefinition
- Location DirectiveLocation
-}
-
-func (d *Directive) ArgumentMap(vars map[string]interface{}) map[string]interface{} {
- return arg2map(d.Definition.Arguments, d.Arguments, vars)
-}
diff --git a/vendor/github.com/vektah/gqlparser/ast/document.go b/vendor/github.com/vektah/gqlparser/ast/document.go
deleted file mode 100644
index 4672d0c0..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/document.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package ast
-
-type QueryDocument struct {
- Operations OperationList
- Fragments FragmentDefinitionList
- Position *Position `dump:"-"`
-}
-
-type SchemaDocument struct {
- Schema SchemaDefinitionList
- SchemaExtension SchemaDefinitionList
- Directives DirectiveDefinitionList
- Definitions DefinitionList
- Extensions DefinitionList
- Position *Position `dump:"-"`
-}
-
-func (d *SchemaDocument) Merge(other *SchemaDocument) {
- d.Schema = append(d.Schema, other.Schema...)
- d.SchemaExtension = append(d.SchemaExtension, other.SchemaExtension...)
- d.Directives = append(d.Directives, other.Directives...)
- d.Definitions = append(d.Definitions, other.Definitions...)
- d.Extensions = append(d.Extensions, other.Extensions...)
-}
-
-type Schema struct {
- Query *Definition
- Mutation *Definition
- Subscription *Definition
-
- Types map[string]*Definition
- Directives map[string]*DirectiveDefinition
-
- PossibleTypes map[string][]*Definition
- Implements map[string][]*Definition
-}
-
-func (s *Schema) AddPossibleType(name string, def *Definition) {
- s.PossibleTypes[name] = append(s.PossibleTypes[name], def)
-}
-
-// GetPossibleTypes will enumerate all the definitions for a given interface or union
-func (s *Schema) GetPossibleTypes(def *Definition) []*Definition {
- return s.PossibleTypes[def.Name]
-}
-
-func (s *Schema) AddImplements(name string, iface *Definition) {
- s.Implements[name] = append(s.Implements[name], iface)
-}
-
-// GetImplements returns all the interface and union definitions that the given definition satisfies
-func (s *Schema) GetImplements(def *Definition) []*Definition {
- return s.Implements[def.Name]
-}
-
-type SchemaDefinition struct {
- Description string
- Directives DirectiveList
- OperationTypes OperationTypeDefinitionList
- Position *Position `dump:"-"`
-}
-
-type OperationTypeDefinition struct {
- Operation Operation
- Type string
- Position *Position `dump:"-"`
-}
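
To illustrate GetPossibleTypes and GetImplements, a small sketch (the schema is made up) using the loader from the gqlparser package that appears further down in this diff:

package main

import (
	"fmt"

	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
)

func main() {
	schema := gqlparser.MustLoadSchema(&ast.Source{
		Name: "schema.graphql",
		Input: `
			interface Node { id: ID! }
			type Issue implements Node { id: ID! title: String! }
			type Query { node(id: ID!): Node }
		`,
	})

	// Object types registered as possible types of the Node interface.
	for _, def := range schema.GetPossibleTypes(schema.Types["Node"]) {
		fmt.Println(def.Name) // Issue
	}
	// Interfaces (and unions) that Issue satisfies.
	for _, def := range schema.GetImplements(schema.Types["Issue"]) {
		fmt.Println(def.Name) // Node
	}
}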
diff --git a/vendor/github.com/vektah/gqlparser/ast/dumper.go b/vendor/github.com/vektah/gqlparser/ast/dumper.go
deleted file mode 100644
index dbb7a7ef..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/dumper.go
+++ /dev/null
@@ -1,159 +0,0 @@
-package ast
-
-import (
- "bytes"
- "fmt"
- "reflect"
- "strconv"
- "strings"
-)
-
-// Dump turns ast into a stable string format for assertions in tests
-func Dump(i interface{}) string {
- v := reflect.ValueOf(i)
-
- d := dumper{Buffer: &bytes.Buffer{}}
- d.dump(v)
-
- return d.String()
-}
-
-type dumper struct {
- *bytes.Buffer
- indent int
-}
-
-type Dumpable interface {
- Dump() string
-}
-
-func (d *dumper) dump(v reflect.Value) {
- if dumpable, isDumpable := v.Interface().(Dumpable); isDumpable {
- d.WriteString(dumpable.Dump())
- return
- }
- switch v.Kind() {
- case reflect.Bool:
- if v.Bool() {
- d.WriteString("true")
- } else {
- d.WriteString("false")
- }
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- d.WriteString(fmt.Sprintf("%d", v.Int()))
-
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- d.WriteString(fmt.Sprintf("%d", v.Uint()))
-
- case reflect.Float32, reflect.Float64:
- d.WriteString(fmt.Sprintf("%.2f", v.Float()))
-
- case reflect.String:
- if v.Type().Name() != "string" {
- d.WriteString(v.Type().Name() + "(" + strconv.Quote(v.String()) + ")")
- } else {
- d.WriteString(strconv.Quote(v.String()))
- }
-
- case reflect.Array, reflect.Slice:
- d.dumpArray(v)
-
- case reflect.Interface, reflect.Ptr:
- d.dumpPtr(v)
-
- case reflect.Struct:
- d.dumpStruct(v)
-
- default:
- panic(fmt.Errorf("unsupported kind: %s\n buf: %s", v.Kind().String(), d.String()))
- }
-}
-
-func (d *dumper) writeIndent() {
- d.Buffer.WriteString(strings.Repeat(" ", d.indent))
-}
-
-func (d *dumper) nl() {
- d.Buffer.WriteByte('\n')
- d.writeIndent()
-}
-
-func typeName(t reflect.Type) string {
- if t.Kind() == reflect.Ptr {
- return typeName(t.Elem())
- }
- return t.Name()
-}
-
-func (d *dumper) dumpArray(v reflect.Value) {
- d.WriteString("[" + typeName(v.Type().Elem()) + "]")
-
- for i := 0; i < v.Len(); i++ {
- d.nl()
- d.WriteString("- ")
- d.indent++
- d.dump(v.Index(i))
- d.indent--
- }
-}
-
-func (d *dumper) dumpStruct(v reflect.Value) {
- d.WriteString("<" + v.Type().Name() + ">")
- d.indent++
-
- typ := v.Type()
- for i := 0; i < v.NumField(); i++ {
- f := v.Field(i)
- if typ.Field(i).Tag.Get("dump") == "-" {
- continue
- }
-
- if isZero(f) {
- continue
- }
- d.nl()
- d.WriteString(typ.Field(i).Name)
- d.WriteString(": ")
- d.dump(v.Field(i))
- }
-
- d.indent--
-}
-
-func isZero(v reflect.Value) bool {
- switch v.Kind() {
- case reflect.Ptr, reflect.Interface:
- return v.IsNil()
- case reflect.Func, reflect.Map:
- return v.IsNil()
-
- case reflect.Array, reflect.Slice:
- if v.IsNil() {
- return true
- }
- z := true
- for i := 0; i < v.Len(); i++ {
- z = z && isZero(v.Index(i))
- }
- return z
- case reflect.Struct:
- z := true
- for i := 0; i < v.NumField(); i++ {
- z = z && isZero(v.Field(i))
- }
- return z
- case reflect.String:
- return v.String() == ""
- }
-
- // Compare other types directly:
- return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()))
-}
-
-func (d *dumper) dumpPtr(v reflect.Value) {
- if v.IsNil() {
- d.WriteString("nil")
- return
- }
- d.dump(v.Elem())
-}
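
A small sketch of what Dump produces (the field values are made up). Zero-valued fields and fields tagged dump:"-" are omitted, which is what keeps the output stable for test assertions:

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/ast"
)

func main() {
	f := &ast.Field{Alias: "greeting", Name: "hello"}

	// Prints an indented, stable representation, roughly:
	// <Field>
	//   Alias: "greeting"
	//   Name: "hello"
	fmt.Println(ast.Dump(f))
}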
diff --git a/vendor/github.com/vektah/gqlparser/ast/fragment.go b/vendor/github.com/vektah/gqlparser/ast/fragment.go
deleted file mode 100644
index 57ab56c7..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/fragment.go
+++ /dev/null
@@ -1,38 +0,0 @@
-package ast
-
-type FragmentSpread struct {
- Name string
- Directives DirectiveList
-
- // Require validation
- ObjectDefinition *Definition
- Definition *FragmentDefinition
-
- Position *Position `dump:"-"`
-}
-
-type InlineFragment struct {
- TypeCondition string
- Directives DirectiveList
- SelectionSet SelectionSet
-
- // Require validation
- ObjectDefinition *Definition
-
- Position *Position `dump:"-"`
-}
-
-type FragmentDefinition struct {
- Name string
- // Note: fragment variable definitions are experimental and may be changed
- // or removed in the future.
- VariableDefinition VariableDefinitionList
- TypeCondition string
- Directives DirectiveList
- SelectionSet SelectionSet
-
- // Require validation
- Definition *Definition
-
- Position *Position `dump:"-"`
-}
diff --git a/vendor/github.com/vektah/gqlparser/ast/operation.go b/vendor/github.com/vektah/gqlparser/ast/operation.go
deleted file mode 100644
index 03e916a0..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/operation.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package ast
-
-type Operation string
-
-const (
- Query Operation = "query"
- Mutation Operation = "mutation"
- Subscription Operation = "subscription"
-)
-
-type OperationDefinition struct {
- Operation Operation
- Name string
- VariableDefinitions VariableDefinitionList
- Directives DirectiveList
- SelectionSet SelectionSet
- Position *Position `dump:"-"`
-}
-
-type VariableDefinition struct {
- Variable string
- Type *Type
- DefaultValue *Value
- Position *Position `dump:"-"`
-
- // Requires validation
- Definition *Definition
- Used bool `dump:"-"`
-}
diff --git a/vendor/github.com/vektah/gqlparser/ast/selection.go b/vendor/github.com/vektah/gqlparser/ast/selection.go
deleted file mode 100644
index 159db844..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/selection.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package ast
-
-type SelectionSet []Selection
-
-type Selection interface {
- isSelection()
- GetPosition() *Position
-}
-
-func (*Field) isSelection() {}
-func (*FragmentSpread) isSelection() {}
-func (*InlineFragment) isSelection() {}
-
-func (s *Field) GetPosition() *Position { return s.Position }
-func (s *FragmentSpread) GetPosition() *Position { return s.Position }
-func (s *InlineFragment) GetPosition() *Position { return s.Position }
-
-type Field struct {
- Alias string
- Name string
- Arguments ArgumentList
- Directives DirectiveList
- SelectionSet SelectionSet
- Position *Position `dump:"-"`
-
- // Require validation
- Definition *FieldDefinition
- ObjectDefinition *Definition
-}
-
-type Argument struct {
- Name string
- Value *Value
- Position *Position `dump:"-"`
-}
-
-func (f *Field) ArgumentMap(vars map[string]interface{}) map[string]interface{} {
- return arg2map(f.Definition.Arguments, f.Arguments, vars)
-}
diff --git a/vendor/github.com/vektah/gqlparser/ast/source.go b/vendor/github.com/vektah/gqlparser/ast/source.go
deleted file mode 100644
index acb07ba6..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/source.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package ast
-
-type Source struct {
- Name string
- Input string
- BuiltIn bool
-}
-
-type Position struct {
- Start int // The starting position, in runes, of this token in the input.
- End int // The end position, in runes, of this token in the input.
- Line int // The line number at the start of this item.
- Column int // The column number at the start of this item.
- Src *Source // The source document this token belongs to
-}
diff --git a/vendor/github.com/vektah/gqlparser/ast/type.go b/vendor/github.com/vektah/gqlparser/ast/type.go
deleted file mode 100644
index 9577fdb4..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/type.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package ast
-
-func NonNullNamedType(named string, pos *Position) *Type {
- return &Type{NamedType: named, NonNull: true, Position: pos}
-}
-
-func NamedType(named string, pos *Position) *Type {
- return &Type{NamedType: named, NonNull: false, Position: pos}
-}
-
-func NonNullListType(elem *Type, pos *Position) *Type {
- return &Type{Elem: elem, NonNull: true, Position: pos}
-}
-
-func ListType(elem *Type, pos *Position) *Type {
- return &Type{Elem: elem, NonNull: false, Position: pos}
-}
-
-type Type struct {
- NamedType string
- Elem *Type
- NonNull bool
- Position *Position `dump:"-"`
-}
-
-func (t *Type) Name() string {
- if t.NamedType != "" {
- return t.NamedType
- }
-
- return t.Elem.Name()
-}
-
-func (t *Type) String() string {
- nn := ""
- if t.NonNull {
- nn = "!"
- }
- if t.NamedType != "" {
- return t.NamedType + nn
- }
-
- return "[" + t.Elem.String() + "]" + nn
-}
-
-func (t *Type) IsCompatible(other *Type) bool {
- if t.NamedType != other.NamedType {
- return false
- }
-
- if t.Elem != nil && other.Elem == nil {
- return false
- }
-
- if t.Elem != nil && !t.Elem.IsCompatible(other.Elem) {
- return false
- }
-
- if other.NonNull {
- return t.NonNull
- }
-
- return true
-}
-
-func (v *Type) Dump() string {
- return v.String()
-}
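
A short sketch of the constructors and methods above, building the GraphQL type [Episode!]! (the type name is illustrative):

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/ast"
)

func main() {
	t := ast.NonNullListType(ast.NonNullNamedType("Episode", nil), nil)

	fmt.Println(t.String()) // [Episode!]!
	fmt.Println(t.Name())   // Episode (the innermost named type)

	// A non-null type can be used where a nullable one is expected, but not the reverse.
	fmt.Println(ast.NonNullNamedType("Episode", nil).IsCompatible(ast.NamedType("Episode", nil))) // true
	fmt.Println(ast.NamedType("Episode", nil).IsCompatible(ast.NonNullNamedType("Episode", nil))) // false
}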
diff --git a/vendor/github.com/vektah/gqlparser/ast/value.go b/vendor/github.com/vektah/gqlparser/ast/value.go
deleted file mode 100644
index c25ef150..00000000
--- a/vendor/github.com/vektah/gqlparser/ast/value.go
+++ /dev/null
@@ -1,120 +0,0 @@
-package ast
-
-import (
- "fmt"
- "strconv"
- "strings"
-)
-
-type ValueKind int
-
-const (
- Variable ValueKind = iota
- IntValue
- FloatValue
- StringValue
- BlockValue
- BooleanValue
- NullValue
- EnumValue
- ListValue
- ObjectValue
-)
-
-type Value struct {
- Raw string
- Children ChildValueList
- Kind ValueKind
- Position *Position `dump:"-"`
-
- // Require validation
- Definition *Definition
- VariableDefinition *VariableDefinition
- ExpectedType *Type
-}
-
-type ChildValue struct {
- Name string
- Value *Value
- Position *Position `dump:"-"`
-}
-
-func (v *Value) Value(vars map[string]interface{}) (interface{}, error) {
- if v == nil {
- return nil, nil
- }
- switch v.Kind {
- case Variable:
- if value, ok := vars[v.Raw]; ok {
- return value, nil
- }
- if v.VariableDefinition != nil && v.VariableDefinition.DefaultValue != nil {
- return v.VariableDefinition.DefaultValue.Value(vars)
- }
- return nil, nil
- case IntValue:
- return strconv.ParseInt(v.Raw, 10, 64)
- case FloatValue:
- return strconv.ParseFloat(v.Raw, 64)
- case StringValue, BlockValue, EnumValue:
- return v.Raw, nil
- case BooleanValue:
- return strconv.ParseBool(v.Raw)
- case NullValue:
- return nil, nil
- case ListValue:
- var val []interface{}
- for _, elem := range v.Children {
- elemVal, err := elem.Value.Value(vars)
- if err != nil {
- return val, err
- }
- val = append(val, elemVal)
- }
- return val, nil
- case ObjectValue:
- val := map[string]interface{}{}
- for _, elem := range v.Children {
- elemVal, err := elem.Value.Value(vars)
- if err != nil {
- return val, err
- }
- val[elem.Name] = elemVal
- }
- return val, nil
- default:
- panic(fmt.Errorf("unknown value kind %d", v.Kind))
- }
-}
-
-func (v *Value) String() string {
- if v == nil {
- return "<nil>"
- }
- switch v.Kind {
- case Variable:
- return "$" + v.Raw
- case IntValue, FloatValue, EnumValue, BooleanValue, NullValue:
- return v.Raw
- case StringValue, BlockValue:
- return strconv.Quote(v.Raw)
- case ListValue:
- var val []string
- for _, elem := range v.Children {
- val = append(val, elem.Value.String())
- }
- return "[" + strings.Join(val, ",") + "]"
- case ObjectValue:
- var val []string
- for _, elem := range v.Children {
- val = append(val, elem.Name+":"+elem.Value.String())
- }
- return "{" + strings.Join(val, ",") + "}"
- default:
- panic(fmt.Errorf("unknown value kind %d", v.Kind))
- }
-}
-
-func (v *Value) Dump() string {
- return v.String()
-}
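
A sketch of how Value resolves literals and variables (the values here are made up):

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/ast"
)

func main() {
	// A literal list value [1, $limit], with $limit supplied through vars.
	v := &ast.Value{
		Kind: ast.ListValue,
		Children: ast.ChildValueList{
			{Value: &ast.Value{Kind: ast.IntValue, Raw: "1"}},
			{Value: &ast.Value{Kind: ast.Variable, Raw: "limit"}},
		},
	}

	fmt.Println(v.String()) // [1,$limit]

	val, err := v.Value(map[string]interface{}{"limit": 10})
	if err != nil {
		panic(err)
	}
	fmt.Println(val) // [1 10]
}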
diff --git a/vendor/github.com/vektah/gqlparser/gqlerror/error.go b/vendor/github.com/vektah/gqlparser/gqlerror/error.go
deleted file mode 100644
index c4c0847a..00000000
--- a/vendor/github.com/vektah/gqlparser/gqlerror/error.go
+++ /dev/null
@@ -1,133 +0,0 @@
-package gqlerror
-
-import (
- "bytes"
- "fmt"
- "strconv"
-
- "github.com/vektah/gqlparser/ast"
-)
-
-// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors
-type Error struct {
- Message string `json:"message"`
- Path []interface{} `json:"path,omitempty"`
- Locations []Location `json:"locations,omitempty"`
- Extensions map[string]interface{} `json:"extensions,omitempty"`
- Rule string `json:"-"`
-}
-
-func (err *Error) SetFile(file string) {
- if file == "" {
- return
- }
- if err.Extensions == nil {
- err.Extensions = map[string]interface{}{}
- }
-
- err.Extensions["file"] = file
-}
-
-type Location struct {
- Line int `json:"line,omitempty"`
- Column int `json:"column,omitempty"`
-}
-
-type List []*Error
-
-func (err *Error) Error() string {
- var res bytes.Buffer
- if err == nil {
- return ""
- }
- filename, _ := err.Extensions["file"].(string)
- if filename == "" {
- filename = "input"
- }
- res.WriteString(filename)
-
- if len(err.Locations) > 0 {
- res.WriteByte(':')
- res.WriteString(strconv.Itoa(err.Locations[0].Line))
- }
-
- res.WriteString(": ")
- if ps := err.pathString(); ps != "" {
- res.WriteString(ps)
- res.WriteByte(' ')
- }
-
- res.WriteString(err.Message)
-
- return res.String()
-}
-
-func (err Error) pathString() string {
- var str bytes.Buffer
- for i, v := range err.Path {
-
- switch v := v.(type) {
- case int, int64:
- str.WriteString(fmt.Sprintf("[%d]", v))
- default:
- if i != 0 {
- str.WriteByte('.')
- }
- str.WriteString(fmt.Sprint(v))
- }
- }
- return str.String()
-}
-
-func (errs List) Error() string {
- var buf bytes.Buffer
- for _, err := range errs {
- buf.WriteString(err.Error())
- buf.WriteByte('\n')
- }
- return buf.String()
-}
-
-func WrapPath(path []interface{}, err error) *Error {
- return &Error{
- Message: err.Error(),
- Path: path,
- }
-}
-
-func Errorf(message string, args ...interface{}) *Error {
- return &Error{
- Message: fmt.Sprintf(message, args...),
- }
-}
-
-func ErrorPathf(path []interface{}, message string, args ...interface{}) *Error {
- return &Error{
- Message: fmt.Sprintf(message, args...),
- Path: path,
- }
-}
-
-func ErrorPosf(pos *ast.Position, message string, args ...interface{}) *Error {
- return ErrorLocf(
- pos.Src.Name,
- pos.Line,
- pos.Column,
- message,
- args...,
- )
-}
-
-func ErrorLocf(file string, line int, col int, message string, args ...interface{}) *Error {
- var extensions map[string]interface{}
- if file != "" {
- extensions = map[string]interface{}{"file": file}
- }
- return &Error{
- Message: fmt.Sprintf(message, args...),
- Extensions: extensions,
- Locations: []Location{
- {Line: line, Column: col},
- },
- }
-}
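
A small sketch of the error constructors above (file name, position, and messages are illustrative):

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/gqlerror"
)

func main() {
	err := gqlerror.ErrorLocf("schema.graphql", 3, 7, "undefined type %s", "Character")
	fmt.Println(err) // schema.graphql:3: undefined type Character

	errs := gqlerror.List{err, gqlerror.Errorf("second problem")}
	fmt.Print(errs) // one line per error
}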
diff --git a/vendor/github.com/vektah/gqlparser/gqlparser.go b/vendor/github.com/vektah/gqlparser/gqlparser.go
deleted file mode 100644
index 71e46407..00000000
--- a/vendor/github.com/vektah/gqlparser/gqlparser.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package gqlparser
-
-import (
- "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
- "github.com/vektah/gqlparser/parser"
- "github.com/vektah/gqlparser/validator"
- _ "github.com/vektah/gqlparser/validator/rules"
-)
-
-func LoadSchema(str ...*ast.Source) (*ast.Schema, *gqlerror.Error) {
- return validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...)
-}
-
-func MustLoadSchema(str ...*ast.Source) *ast.Schema {
- s, err := validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...)
- if err != nil {
- panic(err)
- }
- return s
-}
-
-func LoadQuery(schema *ast.Schema, str string) (*ast.QueryDocument, gqlerror.List) {
- query, err := parser.ParseQuery(&ast.Source{Input: str})
- if err != nil {
- return nil, gqlerror.List{err}
- }
- errs := validator.Validate(schema, query)
- if errs != nil {
- return nil, errs
- }
-
- return query, nil
-}
-
-func MustLoadQuery(schema *ast.Schema, str string) *ast.QueryDocument {
- q, err := LoadQuery(schema, str)
- if err != nil {
- panic(err)
- }
- return q
-}
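
A minimal end-to-end sketch using the entry points above (the schema and query are illustrative):

package main

import (
	"fmt"

	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
)

func main() {
	schema := gqlparser.MustLoadSchema(&ast.Source{
		Name:  "schema.graphql",
		Input: "type Query { hello(name: String!): String! }",
	})

	// LoadQuery parses the query and validates it against the schema.
	query, errs := gqlparser.LoadQuery(schema, `{ hello(name: "world") }`)
	if errs != nil {
		panic(errs)
	}

	field := query.Operations[0].SelectionSet[0].(*ast.Field)
	fmt.Println(field.Name) // hello
}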
diff --git a/vendor/github.com/vektah/gqlparser/lexer/blockstring.go b/vendor/github.com/vektah/gqlparser/lexer/blockstring.go
deleted file mode 100644
index 4065a610..00000000
--- a/vendor/github.com/vektah/gqlparser/lexer/blockstring.go
+++ /dev/null
@@ -1,58 +0,0 @@
-package lexer
-
-import (
- "math"
- "strings"
-)
-
-// blockStringValue produces the value of a block string from its parsed raw value, similar to
-// Coffeescript's block string, Python's docstring trim or Ruby's strip_heredoc.
-//
-// This implements the GraphQL spec's BlockStringValue() static algorithm.
-func blockStringValue(raw string) string {
- lines := strings.Split(raw, "\n")
-
- commonIndent := math.MaxInt32
- for _, line := range lines {
- indent := leadingWhitespace(line)
- if indent < len(line) && indent < commonIndent {
- commonIndent = indent
- if commonIndent == 0 {
- break
- }
- }
- }
-
- if commonIndent != math.MaxInt32 && len(lines) > 0 {
- for i := 1; i < len(lines); i++ {
- if len(lines[i]) < commonIndent {
- lines[i] = ""
- } else {
- lines[i] = lines[i][commonIndent:]
- }
- }
- }
-
- start := 0
- end := len(lines)
-
- for start < end && leadingWhitespace(lines[start]) == math.MaxInt32 {
- start++
- }
-
- for start < end && leadingWhitespace(lines[end-1]) == math.MaxInt32 {
- end--
- }
-
- return strings.Join(lines[start:end], "\n")
-}
-
-func leadingWhitespace(str string) int {
- for i, r := range str {
- if r != ' ' && r != '\t' {
- return i
- }
- }
- // this line is made up entirely of whitespace, its leading whitespace doesnt count.
- return math.MaxInt32
-}
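
To make the trimming concrete, a test-style sketch (hypothetical, placed inside package lexer since blockStringValue is unexported), using the example from the GraphQL spec: common indentation is removed (ignoring the first line) and leading/trailing blank lines are dropped.

package lexer

import "testing"

// Hypothetical test, not part of this commit.
func TestBlockStringValueSketch(t *testing.T) {
	raw := "\n    Hello,\n      World!\n\n    Yours,\n      GraphQL."
	want := "Hello,\n  World!\n\nYours,\n  GraphQL."
	if got := blockStringValue(raw); got != want {
		t.Fatalf("got %q, want %q", got, want)
	}
}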
diff --git a/vendor/github.com/vektah/gqlparser/lexer/lexer.go b/vendor/github.com/vektah/gqlparser/lexer/lexer.go
deleted file mode 100644
index 89687857..00000000
--- a/vendor/github.com/vektah/gqlparser/lexer/lexer.go
+++ /dev/null
@@ -1,510 +0,0 @@
-package lexer
-
-import (
- "bytes"
- "unicode/utf8"
-
- "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-// Lexer turns graphql request and schema strings into tokens
-type Lexer struct {
- *ast.Source
- // An offset into the string in bytes
- start int
- // An offset into the string in runes
- startRunes int
- // An offset into the string in bytes
- end int
- // An offset into the string in runes
- endRunes int
- // the current line number
- line int
- // An offset into the string in rune
- lineStartRunes int
-}
-
-func New(src *ast.Source) Lexer {
- return Lexer{
- Source: src,
- line: 1,
- }
-}
-
-// take one rune from input and advance end
-func (s *Lexer) peek() (rune, int) {
- return utf8.DecodeRuneInString(s.Input[s.end:])
-}
-
-func (s *Lexer) makeToken(kind Type) (Token, *gqlerror.Error) {
- return s.makeValueToken(kind, s.Input[s.start:s.end])
-}
-
-func (s *Lexer) makeValueToken(kind Type, value string) (Token, *gqlerror.Error) {
- return Token{
- Kind: kind,
- Value: value,
- Pos: ast.Position{
- Start: s.startRunes,
- End: s.endRunes,
- Line: s.line,
- Column: s.startRunes - s.lineStartRunes + 1,
- Src: s.Source,
- },
- }, nil
-}
-
-func (s *Lexer) makeError(format string, args ...interface{}) (Token, *gqlerror.Error) {
- column := s.endRunes - s.lineStartRunes + 1
- return Token{
- Kind: Invalid,
- Pos: ast.Position{
- Start: s.startRunes,
- End: s.endRunes,
- Line: s.line,
- Column: column,
- Src: s.Source,
- },
- }, gqlerror.ErrorLocf(s.Source.Name, s.line, column, format, args...)
-}
-
-// ReadToken gets the next token from the source starting at the given position.
-//
-// This skips over whitespace and comments until it finds the next lexable
-// token, then lexes punctuators immediately or calls the appropriate helper
-// function for more complicated tokens.
-func (s *Lexer) ReadToken() (token Token, err *gqlerror.Error) {
-
- s.ws()
- s.start = s.end
- s.startRunes = s.endRunes
-
- if s.end >= len(s.Input) {
- return s.makeToken(EOF)
- }
- r := s.Input[s.start]
- s.end++
- s.endRunes++
- switch r {
- case '!':
- return s.makeValueToken(Bang, "")
-
- case '$':
- return s.makeValueToken(Dollar, "")
- case '&':
- return s.makeValueToken(Amp, "")
- case '(':
- return s.makeValueToken(ParenL, "")
- case ')':
- return s.makeValueToken(ParenR, "")
- case '.':
- if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == "..." {
- s.end += 2
- s.endRunes += 2
- return s.makeValueToken(Spread, "")
- }
- case ':':
- return s.makeValueToken(Colon, "")
- case '=':
- return s.makeValueToken(Equals, "")
- case '@':
- return s.makeValueToken(At, "")
- case '[':
- return s.makeValueToken(BracketL, "")
- case ']':
- return s.makeValueToken(BracketR, "")
- case '{':
- return s.makeValueToken(BraceL, "")
- case '}':
- return s.makeValueToken(BraceR, "")
- case '|':
- return s.makeValueToken(Pipe, "")
- case '#':
- s.readComment()
- return s.ReadToken()
-
- case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z':
- return s.readName()
-
- case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
- return s.readNumber()
-
- case '"':
- if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == `"""` {
- return s.readBlockString()
- }
-
- return s.readString()
- }
-
- s.end--
- s.endRunes--
-
- if r < 0x0020 && r != 0x0009 && r != 0x000a && r != 0x000d {
- return s.makeError(`Cannot contain the invalid character "\u%04d"`, r)
- }
-
- if r == '\'' {
- return s.makeError(`Unexpected single quote character ('), did you mean to use a double quote (")?`)
- }
-
- return s.makeError(`Cannot parse the unexpected character "%s".`, string(r))
-}
-
-// ws reads from body starting at startPosition until it finds a non-whitespace
-// or commented character, and updates the token end to include all whitespace
-func (s *Lexer) ws() {
- for s.end < len(s.Input) {
- switch s.Input[s.end] {
- case '\t', ' ', ',':
- s.end++
- s.endRunes++
- case '\n':
- s.end++
- s.endRunes++
- s.line++
- s.lineStartRunes = s.endRunes
- case '\r':
- s.end++
- s.endRunes++
- s.line++
- s.lineStartRunes = s.endRunes
- // skip the following newline if its there
- if s.end < len(s.Input) && s.Input[s.end] == '\n' {
- s.end++
- s.endRunes++
- }
- // byte order mark, given ws is hot path we aren't relying on the unicode package here.
- case 0xef:
- if s.end+2 < len(s.Input) && s.Input[s.end+1] == 0xBB && s.Input[s.end+2] == 0xBF {
- s.end += 3
- s.endRunes++
- } else {
- return
- }
- default:
- return
- }
- }
-}
-
-// readComment from the input
-//
-// #[\u0009\u0020-\uFFFF]*
-func (s *Lexer) readComment() (Token, *gqlerror.Error) {
- for s.end < len(s.Input) {
- r, w := s.peek()
-
- // SourceCharacter but not LineTerminator
- if r > 0x001f || r == '\t' {
- s.end += w
- s.endRunes++
- } else {
- break
- }
- }
-
- return s.makeToken(Comment)
-}
-
-// readNumber from the input, either a float
-// or an int depending on whether a decimal point appears.
-//
-// Int: -?(0|[1-9][0-9]*)
-// Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
-func (s *Lexer) readNumber() (Token, *gqlerror.Error) {
- float := false
-
- // backup to the first digit
- s.end--
- s.endRunes--
-
- s.acceptByte('-')
-
- if s.acceptByte('0') {
- if consumed := s.acceptDigits(); consumed != 0 {
- s.end -= consumed
- s.endRunes -= consumed
- return s.makeError("Invalid number, unexpected digit after 0: %s.", s.describeNext())
- }
- } else {
- if consumed := s.acceptDigits(); consumed == 0 {
- return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
- }
- }
-
- if s.acceptByte('.') {
- float = true
-
- if consumed := s.acceptDigits(); consumed == 0 {
- return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
- }
- }
-
- if s.acceptByte('e', 'E') {
- float = true
-
- s.acceptByte('-', '+')
-
- if consumed := s.acceptDigits(); consumed == 0 {
- return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
- }
- }
-
- if float {
- return s.makeToken(Float)
- } else {
- return s.makeToken(Int)
- }
-}
-
-// acceptByte if it matches any of given bytes, returning true if it found anything
-func (s *Lexer) acceptByte(bytes ...uint8) bool {
- if s.end >= len(s.Input) {
- return false
- }
-
- for _, accepted := range bytes {
- if s.Input[s.end] == accepted {
- s.end++
- s.endRunes++
- return true
- }
- }
- return false
-}
-
-// acceptDigits from the input, returning the number of digits it found
-func (s *Lexer) acceptDigits() int {
- consumed := 0
- for s.end < len(s.Input) && s.Input[s.end] >= '0' && s.Input[s.end] <= '9' {
- s.end++
- s.endRunes++
- consumed++
- }
-
- return consumed
-}
-
-// describeNext peeks at the input and returns a human readable string. This should will alloc
-// and should only be used in errors
-func (s *Lexer) describeNext() string {
- if s.end < len(s.Input) {
- return `"` + string(s.Input[s.end]) + `"`
- }
- return "<EOF>"
-}
-
-// readString from the input
-//
-// "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
-func (s *Lexer) readString() (Token, *gqlerror.Error) {
- inputLen := len(s.Input)
-
- // this buffer is lazily created only if there are escape characters.
- var buf *bytes.Buffer
-
- // skip the opening quote
- s.start++
- s.startRunes++
-
- for s.end < inputLen {
- r := s.Input[s.end]
- if r == '\n' || r == '\r' {
- break
- }
- if r < 0x0020 && r != '\t' {
- return s.makeError(`Invalid character within String: "\u%04d".`, r)
- }
- switch r {
- default:
- var char = rune(r)
- var w = 1
-
- // skip unicode overhead if we are in the ascii range
- if r >= 127 {
- char, w = utf8.DecodeRuneInString(s.Input[s.end:])
- }
- s.end += w
- s.endRunes++
-
- if buf != nil {
- buf.WriteRune(char)
- }
-
- case '"':
- t, err := s.makeToken(String)
- // the token should not include the quotes in its value, but should cover them in its position
- t.Pos.Start--
- t.Pos.End++
-
- if buf != nil {
- t.Value = buf.String()
- }
-
- // skip the close quote
- s.end++
- s.endRunes++
-
- return t, err
-
- case '\\':
- if s.end+1 >= inputLen {
- s.end++
- s.endRunes++
- return s.makeError(`Invalid character escape sequence.`)
- }
-
- if buf == nil {
- buf = bytes.NewBufferString(s.Input[s.start:s.end])
- }
-
- escape := s.Input[s.end+1]
-
- if escape == 'u' {
- if s.end+6 >= inputLen {
- s.end++
- s.endRunes++
- return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:])
- }
-
- r, ok := unhex(s.Input[s.end+2 : s.end+6])
- if !ok {
- s.end++
- s.endRunes++
- return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:s.end+5])
- }
- buf.WriteRune(r)
- s.end += 6
- s.endRunes += 6
- } else {
- switch escape {
- case '"', '/', '\\':
- buf.WriteByte(escape)
- case 'b':
- buf.WriteByte('\b')
- case 'f':
- buf.WriteByte('\f')
- case 'n':
- buf.WriteByte('\n')
- case 'r':
- buf.WriteByte('\r')
- case 't':
- buf.WriteByte('\t')
- default:
- s.end += 1
- s.endRunes += 1
- return s.makeError("Invalid character escape sequence: \\%s.", string(escape))
- }
- s.end += 2
- s.endRunes += 2
- }
- }
- }
-
- return s.makeError("Unterminated string.")
-}
-
-// readBlockString from the input
-//
-// """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
-func (s *Lexer) readBlockString() (Token, *gqlerror.Error) {
- inputLen := len(s.Input)
-
- var buf bytes.Buffer
-
- // skip the opening quote
- s.start += 3
- s.startRunes += 3
- s.end += 2
- s.endRunes += 2
-
- for s.end < inputLen {
- r := s.Input[s.end]
-
- // Closing triple quote (""")
- if r == '"' && s.end+3 <= inputLen && s.Input[s.end:s.end+3] == `"""` {
- t, err := s.makeValueToken(BlockString, blockStringValue(buf.String()))
-
- // the token should not include the quotes in its value, but should cover them in its position
- t.Pos.Start -= 3
- t.Pos.End += 3
-
- // skip the close quote
- s.end += 3
- s.endRunes += 3
-
- return t, err
- }
-
- // SourceCharacter
- if r < 0x0020 && r != '\t' && r != '\n' && r != '\r' {
- return s.makeError(`Invalid character within String: "\u%04d".`, r)
- }
-
- if r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""` {
- buf.WriteString(`"""`)
- s.end += 4
- s.endRunes += 4
- } else if r == '\r' {
- if s.end+1 < inputLen && s.Input[s.end+1] == '\n' {
- s.end++
- s.endRunes++
- }
-
- buf.WriteByte('\n')
- s.end++
- s.endRunes++
- } else {
- var char = rune(r)
- var w = 1
-
- // skip unicode overhead if we are in the ascii range
- if r >= 127 {
- char, w = utf8.DecodeRuneInString(s.Input[s.end:])
- }
- s.end += w
- s.endRunes++
- buf.WriteRune(char)
- }
- }
-
- return s.makeError("Unterminated string.")
-}
-
-func unhex(b string) (v rune, ok bool) {
- for _, c := range b {
- v <<= 4
- switch {
- case '0' <= c && c <= '9':
- v |= c - '0'
- case 'a' <= c && c <= 'f':
- v |= c - 'a' + 10
- case 'A' <= c && c <= 'F':
- v |= c - 'A' + 10
- default:
- return 0, false
- }
- }
-
- return v, true
-}
-
-// readName from the input
-//
-// [_A-Za-z][_0-9A-Za-z]*
-func (s *Lexer) readName() (Token, *gqlerror.Error) {
- for s.end < len(s.Input) {
- r, w := s.peek()
-
- if (r >= '0' && r <= '9') || (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' {
- s.end += w
- s.endRunes++
- } else {
- break
- }
- }
-
- return s.makeToken(Name)
-}
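
A sketch of driving the lexer above directly (the input document is illustrative): ReadToken is called in a loop until it returns an EOF token.

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/ast"
	"github.com/vektah/gqlparser/lexer"
)

func main() {
	lex := lexer.New(&ast.Source{Name: "query.graphql", Input: `{ hello(name: "world") }`})

	for {
		tok, err := lex.ReadToken()
		if err != nil {
			panic(err)
		}
		if tok.Kind == lexer.EOF {
			break
		}
		// Punctuator tokens carry an empty Value; names, numbers and strings carry their text.
		fmt.Printf("%v %q (line %d, col %d)\n", tok.Kind, tok.Value, tok.Pos.Line, tok.Pos.Column)
	}
}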
diff --git a/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml b/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml
deleted file mode 100644
index e2c26696..00000000
--- a/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml
+++ /dev/null
@@ -1,672 +0,0 @@
-encoding:
- - name: disallows uncommon control characters
- input: "\u0007"
- error:
- message: 'Cannot contain the invalid character "\u0007"'
- locations: [{line: 1, column: 1}]
-
- - name: accepts BOM header
- input: "\uFEFF foo"
- tokens:
- -
- kind: NAME
- start: 2
- end: 5
- value: 'foo'
-
-simple tokens:
- - name: records line and column
- input: "\n \r\n \r foo\n"
- tokens:
- -
- kind: NAME
- start: 8
- end: 11
- line: 4
- column: 3
- value: 'foo'
-
- - name: skips whitespace
- input: "\n\n foo\n\n\n"
- tokens:
- -
- kind: NAME
- start: 6
- end: 9
- value: 'foo'
-
- - name: skips comments
- input: "\n #comment\n foo#comment\n"
- tokens:
- -
- kind: NAME
- start: 18
- end: 21
- value: 'foo'
-
- - name: skips commas
- input: ",,,foo,,,"
- tokens:
- -
- kind: NAME
- start: 3
- end: 6
- value: 'foo'
-
- - name: errors respect whitespace
- input: "\n\n ?\n\n\n"
- error:
- message: 'Cannot parse the unexpected character "?".'
- locations: [{line: 3, column: 5}]
- string: |
- Syntax Error: Cannot parse the unexpected character "?".
- GraphQL request (3:5)
- 2:
- 3: ?
- ^
- 4:
-
- - name: lex reports useful information for dashes in names
- input: "a-b"
- error:
- message: 'Invalid number, expected digit but got: "b".'
- locations: [{ line: 1, column: 3 }]
- tokens:
- -
- kind: Name
- start: 0
- end: 1
- value: a
-
-lexes strings:
- - name: basic
- input: '"simple"'
- tokens:
- -
- kind: STRING
- start: 0
- end: 8
- value: 'simple'
-
- - name: whitespace
- input: '" white space "'
- tokens:
- -
- kind: STRING
- start: 0
- end: 15
- value: ' white space '
-
- - name: quote
- input: '"quote \""'
- tokens:
- -
- kind: STRING
- start: 0
- end: 10
- value: 'quote "'
-
- - name: escaped
- input: '"escaped \n\r\b\t\f"'
- tokens:
- -
- kind: STRING
- start: 0
- end: 20
- value: "escaped \n\r\b\t\f"
-
- - name: slashes
- input: '"slashes \\ \/"'
- tokens:
- -
- kind: STRING
- start: 0
- end: 15
- value: 'slashes \ /'
-
- - name: unicode
- input: '"unicode \u1234\u5678\u90AB\uCDEF"'
- tokens:
- -
- kind: STRING
- start: 0
- end: 34
- value: "unicode \u1234\u5678\u90AB\uCDEF"
-
-lex reports useful string errors:
- - name: unterminated
- input: '"'
- error:
- message: "Unterminated string."
- locations: [{ line: 1, column: 2 }]
-
- - name: no end quote
- input: '"no end quote'
- error:
- message: 'Unterminated string.'
- locations: [{ line: 1, column: 14 }]
-
- - name: single quotes
- input: "'single quotes'"
- error:
- message: "Unexpected single quote character ('), did you mean to use a double quote (\")?"
- locations: [{ line: 1, column: 1 }]
-
- - name: control characters
- input: "\"contains unescaped \u0007 control char\""
- error:
- message: 'Invalid character within String: "\u0007".'
- locations: [{ line: 1, column: 21 }]
-
- - name: null byte
- input: "\"null-byte is not \u0000 end of file\""
- error:
- message: 'Invalid character within String: "\u0000".'
- locations: [{ line: 1, column: 19 }]
-
- - name: unterminated newline
- input: "\"multi\nline\""
- error:
- message: 'Unterminated string.'
- locations: [{line: 1, column: 7 }]
-
- - name: unterminated carriage return
- input: "\"multi\rline\""
- error:
- message: 'Unterminated string.'
- locations: [{ line: 1, column: 7 }]
-
- - name: bad escape character
- input: '"bad \z esc"'
- error:
- message: 'Invalid character escape sequence: \z.'
- locations: [{ line: 1, column: 7 }]
-
- - name: hex escape sequence
- input: '"bad \x esc"'
- error:
- message: 'Invalid character escape sequence: \x.'
- locations: [{ line: 1, column: 7 }]
-
- - name: short escape sequence
- input: '"bad \u1 esc"'
- error:
- message: 'Invalid character escape sequence: \u1 es.'
- locations: [{ line: 1, column: 7 }]
-
- - name: invalid escape sequence 1
- input: '"bad \u0XX1 esc"'
- error:
- message: 'Invalid character escape sequence: \u0XX1.'
- locations: [{ line: 1, column: 7 }]
-
- - name: invalid escape sequence 2
- input: '"bad \uXXXX esc"'
- error:
- message: 'Invalid character escape sequence: \uXXXX.'
- locations: [{ line: 1, column: 7 }]
-
- - name: invalid escape sequence 3
- input: '"bad \uFXXX esc"'
- error:
- message: 'Invalid character escape sequence: \uFXXX.'
- locations: [{ line: 1, column: 7 }]
-
- - name: invalid character escape sequence
- input: '"bad \uXXXF esc"'
- error:
- message: 'Invalid character escape sequence: \uXXXF.'
- locations: [{ line: 1, column: 7 }]
-
-lexes block strings:
- - name: simple
- input: '"""simple"""'
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 12
- value: 'simple'
-
- - name: white space
- input: '""" white space """'
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 19
- value: ' white space '
-
- - name: contains quote
- input: '"""contains " quote"""'
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 22
- value: 'contains " quote'
-
- - name: contains triplequote
- input: "\"\"\"contains \\\"\"\" triplequote\"\"\""
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 31
- value: 'contains """ triplequote'
-
- - name: multi line
- input: "\"\"\"multi\nline\"\"\""
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 16
- value: "multi\nline"
-
- - name: multi line normalized
- input: "\"\"\"multi\rline\r\nnormalized\"\"\""
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 28
- value: "multi\nline\nnormalized"
-
- - name: unescaped
- input: '"""unescaped \n\r\b\t\f\u1234"""'
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 32
- value: 'unescaped \n\r\b\t\f\u1234'
-
- - name: slashes
- input: '"""slashes \\ \/"""'
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 19
- value: 'slashes \\ \/'
-
- - name: multiple lines
- input: |
- """
-
- spans
- multiple
- lines
-
- """
- tokens:
- -
- kind: BLOCK_STRING
- start: 0
- end: 36
- value: "spans\n multiple\n lines"
-
-lex reports useful block string errors:
- - name: unterminated string
- input: '"""'
- error:
- message: "Unterminated string."
- locations: [{ line: 1, column: 4 }]
-
- - name: unescaped control characters
- input: "\"\"\"contains unescaped \u0007 control char\"\"\""
- error:
- message: 'Invalid character within String: "\u0007".'
- locations: [{ line: 1, column: 23 }]
-
- - name: null byte
- input: "\"\"\"null-byte is not \u0000 end of file\"\"\""
- error:
- message: 'Invalid character within String: "\u0000".'
- locations: [{ line: 1, column: 21 }]
-
-lexes numbers:
- - name: integer
- input: "4"
- tokens:
- -
- kind: INT
- start: 0
- end: 1
- value: '4'
-
- - name: float
- input: "4.123"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 5
- value: '4.123'
-
- - name: negative
- input: "-4"
- tokens:
- -
- kind: INT
- start: 0
- end: 2
- value: '-4'
-
- - name: nine
- input: "9"
- tokens:
- -
- kind: INT
- start: 0
- end: 1
- value: '9'
-
- - name: zero
- input: "0"
- tokens:
- -
- kind: INT
- start: 0
- end: 1
- value: '0'
-
- - name: negative float
- input: "-4.123"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 6
- value: '-4.123'
-
- - name: float leading zero
- input: "0.123"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 5
- value: '0.123'
-
- - name: exponent whole
- input: "123e4"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 5
- value: '123e4'
-
- - name: exponent uppercase
- input: "123E4"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 5
- value: '123E4'
-
- - name: exponent negative power
- input: "123e-4"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 6
- value: '123e-4'
-
- - name: exponent positive power
- input: "123e+4"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 6
- value: '123e+4'
-
- - name: exponent negative base
- input: "-1.123e4"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 8
- value: '-1.123e4'
-
- - name: exponent negative base upper
- input: "-1.123E4"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 8
- value: '-1.123E4'
-
- - name: exponent negative base negative power
- input: "-1.123e-4"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 9
- value: '-1.123e-4'
-
- - name: exponent negative base positive power
- input: "-1.123e+4"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 9
- value: '-1.123e+4'
-
- - name: exponent negative base large power
- input: "-1.123e4567"
- tokens:
- -
- kind: FLOAT
- start: 0
- end: 11
- value: '-1.123e4567'
-
-lex reports useful number errors:
- - name: zero
- input: "00"
- error:
- message: 'Invalid number, unexpected digit after 0: "0".'
- locations: [{ line: 1, column: 2 }]
-
- - name: positive
- input: "+1"
- error:
- message: 'Cannot parse the unexpected character "+".'
- locations: [{ line: 1, column: 1 }]
-
- - name: trailing dot
- input: "1."
- error:
- message: 'Invalid number, expected digit but got: <EOF>.'
- locations: [{ line: 1, column: 3 }]
-
-  - name: trailing dot exponent
- input: "1.e1"
- error:
- message: 'Invalid number, expected digit but got: "e".'
- locations: [{ line: 1, column: 3 }]
-
- - name: missing leading zero
- input: ".123"
- error:
- message: 'Cannot parse the unexpected character ".".'
- locations: [{ line: 1, column: 1 }]
-
- - name: characters
- input: "1.A"
- error:
- message: 'Invalid number, expected digit but got: "A".'
- locations: [{ line: 1, column: 3 }]
-
- - name: negative characters
- input: "-A"
- error:
- message: 'Invalid number, expected digit but got: "A".'
- locations: [{ line: 1, column: 2 }]
-
- - name: missing exponent
- input: '1.0e'
- error:
- message: 'Invalid number, expected digit but got: <EOF>.'
- locations: [{ line: 1, column: 5 }]
-
- - name: character exponent
- input: "1.0eA"
- error:
- message: 'Invalid number, expected digit but got: "A".'
- locations: [{ line: 1, column: 5 }]
-
-lexes punctuation:
- - name: bang
- input: "!"
- tokens:
- -
- kind: BANG
- start: 0
- end: 1
- value: undefined
-
- - name: dollar
- input: "$"
- tokens:
- -
- kind: DOLLAR
- start: 0
- end: 1
- value: undefined
-
- - name: open paren
- input: "("
- tokens:
- -
- kind: PAREN_L
- start: 0
- end: 1
- value: undefined
-
- - name: close paren
- input: ")"
- tokens:
- -
- kind: PAREN_R
- start: 0
- end: 1
- value: undefined
-
- - name: spread
- input: "..."
- tokens:
- -
- kind: SPREAD
- start: 0
- end: 3
- value: undefined
-
- - name: colon
- input: ":"
- tokens:
- -
- kind: COLON
- start: 0
- end: 1
- value: undefined
-
- - name: equals
- input: "="
- tokens:
- -
- kind: EQUALS
- start: 0
- end: 1
- value: undefined
-
- - name: at
- input: "@"
- tokens:
- -
- kind: AT
- start: 0
- end: 1
- value: undefined
-
- - name: open bracket
- input: "["
- tokens:
- -
- kind: BRACKET_L
- start: 0
- end: 1
- value: undefined
-
- - name: close bracket
- input: "]"
- tokens:
- -
- kind: BRACKET_R
- start: 0
- end: 1
- value: undefined
-
- - name: open brace
- input: "{"
- tokens:
- -
- kind: BRACE_L
- start: 0
- end: 1
- value: undefined
-
- - name: close brace
- input: "}"
- tokens:
- -
- kind: BRACE_R
- start: 0
- end: 1
- value: undefined
-
- - name: pipe
- input: "|"
- tokens:
- -
- kind: PIPE
- start: 0
- end: 1
- value: undefined
-
-lex reports useful unknown character error:
- - name: not a spread
- input: ".."
- error:
- message: 'Cannot parse the unexpected character ".".'
- locations: [{ line: 1, column: 1 }]
-
- - name: question mark
- input: "?"
- error:
- message: 'Cannot parse the unexpected character "?".'
- locations: [{ line: 1, column: 1 }]
-
- - name: unicode 203
- input: "\u203B"
- error:
- message: 'Cannot parse the unexpected character "â".'
- locations: [{ line: 1, column: 1 }]
-
- - name: unicode 200
- input: "\u200b"
- error:
- message: 'Cannot parse the unexpected character "â".'
- locations: [{ line: 1, column: 1 }]
-
diff --git a/vendor/github.com/vektah/gqlparser/lexer/token.go b/vendor/github.com/vektah/gqlparser/lexer/token.go
deleted file mode 100644
index aef8b729..00000000
--- a/vendor/github.com/vektah/gqlparser/lexer/token.go
+++ /dev/null
@@ -1,148 +0,0 @@
-package lexer
-
-import (
- "strconv"
-
- "github.com/vektah/gqlparser/ast"
-)
-
-const (
- Invalid Type = iota
- EOF
- Bang
- Dollar
- Amp
- ParenL
- ParenR
- Spread
- Colon
- Equals
- At
- BracketL
- BracketR
- BraceL
- BraceR
- Pipe
- Name
- Int
- Float
- String
- BlockString
- Comment
-)
-
-func (t Type) Name() string {
- switch t {
- case Invalid:
- return "Invalid"
- case EOF:
- return "EOF"
- case Bang:
- return "Bang"
- case Dollar:
- return "Dollar"
- case Amp:
- return "Amp"
- case ParenL:
- return "ParenL"
- case ParenR:
- return "ParenR"
- case Spread:
- return "Spread"
- case Colon:
- return "Colon"
- case Equals:
- return "Equals"
- case At:
- return "At"
- case BracketL:
- return "BracketL"
- case BracketR:
- return "BracketR"
- case BraceL:
- return "BraceL"
- case BraceR:
- return "BraceR"
- case Pipe:
- return "Pipe"
- case Name:
- return "Name"
- case Int:
- return "Int"
- case Float:
- return "Float"
- case String:
- return "String"
- case BlockString:
- return "BlockString"
- case Comment:
- return "Comment"
- }
- return "Unknown " + strconv.Itoa(int(t))
-}
-
-func (t Type) String() string {
- switch t {
- case Invalid:
- return "<Invalid>"
- case EOF:
- return "<EOF>"
- case Bang:
- return "!"
- case Dollar:
- return "$"
- case Amp:
- return "&"
- case ParenL:
- return "("
- case ParenR:
- return ")"
- case Spread:
- return "..."
- case Colon:
- return ":"
- case Equals:
- return "="
- case At:
- return "@"
- case BracketL:
- return "["
- case BracketR:
- return "]"
- case BraceL:
- return "{"
- case BraceR:
- return "}"
- case Pipe:
- return "|"
- case Name:
- return "Name"
- case Int:
- return "Int"
- case Float:
- return "Float"
- case String:
- return "String"
- case BlockString:
- return "BlockString"
- case Comment:
- return "Comment"
- }
- return "Unknown " + strconv.Itoa(int(t))
-}
-
-// Type represents the type of a token. The types are predefined as constants.
-type Type int
-
-type Token struct {
- Kind Type // The token type.
- Value string // The literal value consumed.
- Pos ast.Position // The file and line this token was read from
-}
-
-func (t Token) String() string {
- if t.Value != "" {
- return t.Kind.String() + " " + strconv.Quote(t.Value)
- }
- return t.Kind.String()
-}
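For context, the Token type above pairs a Kind with the literal Value consumed and its source Position. A minimal usage sketch, assuming the pre-go-modules import path used throughout this vendor tree:

```go
package main

import (
	"fmt"

	"github.com/vektah/gqlparser/lexer"
)

func main() {
	// A Name token as the lexer would produce it (position left zero).
	tok := lexer.Token{Kind: lexer.Name, Value: "hero"}
	fmt.Println(tok.Kind.Name()) // Name
	fmt.Println(tok.String())    // Name "hero"

	// Punctuation kinds stringify to their literal character.
	brace := lexer.Token{Kind: lexer.BraceL}
	fmt.Println(brace.String()) // {
}
```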
diff --git a/vendor/github.com/vektah/gqlparser/parser/parser.go b/vendor/github.com/vektah/gqlparser/parser/parser.go
deleted file mode 100644
index 96e98402..00000000
--- a/vendor/github.com/vektah/gqlparser/parser/parser.go
+++ /dev/null
@@ -1,136 +0,0 @@
-package parser
-
-import (
- "strconv"
-
- "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
- "github.com/vektah/gqlparser/lexer"
-)
-
-type parser struct {
- lexer lexer.Lexer
- err *gqlerror.Error
-
- peeked bool
- peekToken lexer.Token
- peekError *gqlerror.Error
-
- prev lexer.Token
-}
-
-func (p *parser) peekPos() *ast.Position {
- if p.err != nil {
- return nil
- }
-
- peek := p.peek()
- return &peek.Pos
-}
-
-func (p *parser) peek() lexer.Token {
- if p.err != nil {
- return p.prev
- }
-
- if !p.peeked {
- p.peekToken, p.peekError = p.lexer.ReadToken()
- p.peeked = true
- }
-
- return p.peekToken
-}
-
-func (p *parser) error(tok lexer.Token, format string, args ...interface{}) {
- if p.err != nil {
- return
- }
- p.err = gqlerror.ErrorLocf(tok.Pos.Src.Name, tok.Pos.Line, tok.Pos.Column, format, args...)
-}
-
-func (p *parser) next() lexer.Token {
- if p.err != nil {
- return p.prev
- }
- if p.peeked {
- p.peeked = false
- p.prev, p.err = p.peekToken, p.peekError
- } else {
- p.prev, p.err = p.lexer.ReadToken()
- }
- return p.prev
-}
-
-func (p *parser) expectKeyword(value string) lexer.Token {
- tok := p.peek()
- if tok.Kind == lexer.Name && tok.Value == value {
- return p.next()
- }
-
- p.error(tok, "Expected %s, found %s", strconv.Quote(value), tok.String())
- return tok
-}
-
-func (p *parser) expect(kind lexer.Type) lexer.Token {
- tok := p.peek()
- if tok.Kind == kind {
- return p.next()
- }
-
- p.error(tok, "Expected %s, found %s", kind, tok.Kind.String())
- return tok
-}
-
-func (p *parser) skip(kind lexer.Type) bool {
- if p.err != nil {
- return false
- }
-
- tok := p.peek()
-
- if tok.Kind != kind {
- return false
- }
- p.next()
- return true
-}
-
-func (p *parser) unexpectedError() {
- p.unexpectedToken(p.peek())
-}
-
-func (p *parser) unexpectedToken(tok lexer.Token) {
- p.error(tok, "Unexpected %s", tok.String())
-}
-
-func (p *parser) many(start lexer.Type, end lexer.Type, cb func()) {
- hasDef := p.skip(start)
- if !hasDef {
- return
- }
-
- for p.peek().Kind != end && p.err == nil {
- cb()
- }
- p.next()
-}
-
-func (p *parser) some(start lexer.Type, end lexer.Type, cb func()) {
- hasDef := p.skip(start)
- if !hasDef {
- return
- }
-
- called := false
- for p.peek().Kind != end && p.err == nil {
- called = true
- cb()
- }
-
- if !called {
- p.error(p.peek(), "expected at least one definition, found %s", p.peek().Kind.String())
- return
- }
-
- p.next()
-}
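The peek/next pair above implements single-token lookahead: peek caches the next token without consuming it, and next returns and clears the cached token (or reads a fresh one). A standalone sketch of that buffering pattern over a plain string slice (hypothetical illustration, not the package's API, and without the error handling shown above):

```go
package main

import "fmt"

// tokenStream caches one token of lookahead, in the style of the
// parser's peeked/peekToken fields above.
type tokenStream struct {
	toks   []string
	peeked bool
	buf    string
}

// read consumes the next raw token, or "<EOF>" when none remain.
func (s *tokenStream) read() string {
	if len(s.toks) == 0 {
		return "<EOF>"
	}
	t := s.toks[0]
	s.toks = s.toks[1:]
	return t
}

// peek returns the next token without consuming it.
func (s *tokenStream) peek() string {
	if !s.peeked {
		s.buf = s.read()
		s.peeked = true
	}
	return s.buf
}

// next consumes and returns the next token.
func (s *tokenStream) next() string {
	t := s.peek()
	s.peeked = false
	return t
}

func main() {
	s := &tokenStream{toks: []string{"query", "{", "hero", "}"}}
	fmt.Println(s.peek(), s.peek(), s.next(), s.next()) // query query query {
}
```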
diff --git a/vendor/github.com/vektah/gqlparser/parser/query.go b/vendor/github.com/vektah/gqlparser/parser/query.go
deleted file mode 100644
index 89e1e2e3..00000000
--- a/vendor/github.com/vektah/gqlparser/parser/query.go
+++ /dev/null
@@ -1,348 +0,0 @@
-package parser
-
-import (
- "github.com/vektah/gqlparser/gqlerror"
- "github.com/vektah/gqlparser/lexer"
-
- . "github.com/vektah/gqlparser/ast"
-)
-
-func ParseQuery(source *Source) (*QueryDocument, *gqlerror.Error) {
- p := parser{
- lexer: lexer.New(source),
- }
- return p.parseQueryDocument(), p.err
-}
-
-func (p *parser) parseQueryDocument() *QueryDocument {
- var doc QueryDocument
- for p.peek().Kind != lexer.EOF {
- if p.err != nil {
- return &doc
- }
- doc.Position = p.peekPos()
- switch p.peek().Kind {
- case lexer.Name:
- switch p.peek().Value {
- case "query", "mutation", "subscription":
- doc.Operations = append(doc.Operations, p.parseOperationDefinition())
- case "fragment":
- doc.Fragments = append(doc.Fragments, p.parseFragmentDefinition())
- default:
- p.unexpectedError()
- }
- case lexer.BraceL:
- doc.Operations = append(doc.Operations, p.parseOperationDefinition())
- default:
- p.unexpectedError()
- }
- }
-
- return &doc
-}
-
-func (p *parser) parseOperationDefinition() *OperationDefinition {
- if p.peek().Kind == lexer.BraceL {
- return &OperationDefinition{
- Position: p.peekPos(),
- Operation: Query,
- SelectionSet: p.parseRequiredSelectionSet(),
- }
- }
-
- var od OperationDefinition
- od.Position = p.peekPos()
- od.Operation = p.parseOperationType()
-
- if p.peek().Kind == lexer.Name {
- od.Name = p.next().Value
- }
-
- od.VariableDefinitions = p.parseVariableDefinitions()
- od.Directives = p.parseDirectives(false)
- od.SelectionSet = p.parseRequiredSelectionSet()
-
- return &od
-}
-
-func (p *parser) parseOperationType() Operation {
- tok := p.next()
- switch tok.Value {
- case "query":
- return Query
- case "mutation":
- return Mutation
- case "subscription":
- return Subscription
- }
- p.unexpectedToken(tok)
- return ""
-}
-
-func (p *parser) parseVariableDefinitions() VariableDefinitionList {
- var defs []*VariableDefinition
- p.many(lexer.ParenL, lexer.ParenR, func() {
- defs = append(defs, p.parseVariableDefinition())
- })
-
- return defs
-}
-
-func (p *parser) parseVariableDefinition() *VariableDefinition {
- var def VariableDefinition
- def.Position = p.peekPos()
- def.Variable = p.parseVariable()
-
- p.expect(lexer.Colon)
-
- def.Type = p.parseTypeReference()
-
- if p.skip(lexer.Equals) {
- def.DefaultValue = p.parseValueLiteral(true)
- }
-
- return &def
-}
-
-func (p *parser) parseVariable() string {
- p.expect(lexer.Dollar)
- return p.parseName()
-}
-
-func (p *parser) parseOptionalSelectionSet() SelectionSet {
- var selections []Selection
- p.some(lexer.BraceL, lexer.BraceR, func() {
- selections = append(selections, p.parseSelection())
- })
-
- return SelectionSet(selections)
-}
-
-func (p *parser) parseRequiredSelectionSet() SelectionSet {
- if p.peek().Kind != lexer.BraceL {
- p.error(p.peek(), "Expected %s, found %s", lexer.BraceL, p.peek().Kind.String())
- return nil
- }
-
- var selections []Selection
- p.some(lexer.BraceL, lexer.BraceR, func() {
- selections = append(selections, p.parseSelection())
- })
-
- return SelectionSet(selections)
-}
-
-func (p *parser) parseSelection() Selection {
- if p.peek().Kind == lexer.Spread {
- return p.parseFragment()
- }
- return p.parseField()
-}
-
-func (p *parser) parseField() *Field {
- var field Field
- field.Position = p.peekPos()
- field.Alias = p.parseName()
-
- if p.skip(lexer.Colon) {
- field.Name = p.parseName()
- } else {
- field.Name = field.Alias
- }
-
- field.Arguments = p.parseArguments(false)
- field.Directives = p.parseDirectives(false)
- if p.peek().Kind == lexer.BraceL {
- field.SelectionSet = p.parseOptionalSelectionSet()
- }
-
- return &field
-}
-
-func (p *parser) parseArguments(isConst bool) ArgumentList {
- var arguments ArgumentList
- p.many(lexer.ParenL, lexer.ParenR, func() {
- arguments = append(arguments, p.parseArgument(isConst))
- })
-
- return arguments
-}
-
-func (p *parser) parseArgument(isConst bool) *Argument {
- arg := Argument{}
- arg.Position = p.peekPos()
- arg.Name = p.parseName()
- p.expect(lexer.Colon)
-
- arg.Value = p.parseValueLiteral(isConst)
- return &arg
-}
-
-func (p *parser) parseFragment() Selection {
- p.expect(lexer.Spread)
-
- if peek := p.peek(); peek.Kind == lexer.Name && peek.Value != "on" {
- return &FragmentSpread{
- Position: p.peekPos(),
- Name: p.parseFragmentName(),
- Directives: p.parseDirectives(false),
- }
- }
-
- var def InlineFragment
- def.Position = p.peekPos()
- if p.peek().Value == "on" {
- p.next() // "on"
-
- def.TypeCondition = p.parseName()
- }
-
- def.Directives = p.parseDirectives(false)
- def.SelectionSet = p.parseRequiredSelectionSet()
- return &def
-}
-
-func (p *parser) parseFragmentDefinition() *FragmentDefinition {
- var def FragmentDefinition
- def.Position = p.peekPos()
- p.expectKeyword("fragment")
-
- def.Name = p.parseFragmentName()
- def.VariableDefinition = p.parseVariableDefinitions()
-
- p.expectKeyword("on")
-
- def.TypeCondition = p.parseName()
- def.Directives = p.parseDirectives(false)
- def.SelectionSet = p.parseRequiredSelectionSet()
- return &def
-}
-
-func (p *parser) parseFragmentName() string {
- if p.peek().Value == "on" {
- p.unexpectedError()
- return ""
- }
-
- return p.parseName()
-}
-
-func (p *parser) parseValueLiteral(isConst bool) *Value {
- token := p.peek()
-
- var kind ValueKind
- switch token.Kind {
- case lexer.BracketL:
- return p.parseList(isConst)
- case lexer.BraceL:
- return p.parseObject(isConst)
- case lexer.Dollar:
- if isConst {
- p.unexpectedError()
- return nil
- }
- return &Value{Position: &token.Pos, Raw: p.parseVariable(), Kind: Variable}
- case lexer.Int:
- kind = IntValue
- case lexer.Float:
- kind = FloatValue
- case lexer.String:
- kind = StringValue
- case lexer.BlockString:
- kind = BlockValue
- case lexer.Name:
- switch token.Value {
- case "true", "false":
- kind = BooleanValue
- case "null":
- kind = NullValue
- default:
- kind = EnumValue
- }
- default:
- p.unexpectedError()
- return nil
- }
-
- p.next()
-
- return &Value{Position: &token.Pos, Raw: token.Value, Kind: kind}
-}
-
-func (p *parser) parseList(isConst bool) *Value {
- var values ChildValueList
- pos := p.peekPos()
- p.many(lexer.BracketL, lexer.BracketR, func() {
- values = append(values, &ChildValue{Value: p.parseValueLiteral(isConst)})
- })
-
- return &Value{Children: values, Kind: ListValue, Position: pos}
-}
-
-func (p *parser) parseObject(isConst bool) *Value {
- var fields ChildValueList
- pos := p.peekPos()
- p.many(lexer.BraceL, lexer.BraceR, func() {
- fields = append(fields, p.parseObjectField(isConst))
- })
-
- return &Value{Children: fields, Kind: ObjectValue, Position: pos}
-}
-
-func (p *parser) parseObjectField(isConst bool) *ChildValue {
- field := ChildValue{}
- field.Position = p.peekPos()
- field.Name = p.parseName()
-
- p.expect(lexer.Colon)
-
- field.Value = p.parseValueLiteral(isConst)
- return &field
-}
-
-func (p *parser) parseDirectives(isConst bool) []*Directive {
- var directives []*Directive
-
- for p.peek().Kind == lexer.At {
- if p.err != nil {
- break
- }
- directives = append(directives, p.parseDirective(isConst))
- }
- return directives
-}
-
-func (p *parser) parseDirective(isConst bool) *Directive {
- p.expect(lexer.At)
-
- return &Directive{
- Position: p.peekPos(),
- Name: p.parseName(),
- Arguments: p.parseArguments(isConst),
- }
-}
-
-func (p *parser) parseTypeReference() *Type {
- var typ Type
-
- if p.skip(lexer.BracketL) {
- typ.Position = p.peekPos()
- typ.Elem = p.parseTypeReference()
- p.expect(lexer.BracketR)
- } else {
- typ.Position = p.peekPos()
- typ.NamedType = p.parseName()
- }
-
- if p.skip(lexer.Bang) {
- typ.Position = p.peekPos()
- typ.NonNull = true
- }
- return &typ
-}
-
-func (p *parser) parseName() string {
- token := p.expect(lexer.Name)
-
- return token.Value
-}
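ParseQuery above is the entry point for executable documents. A minimal caller sketch, again assuming the pre-go-modules import paths shown in this diff:

```go
package main

import (
	"fmt"

	"github.com/vektah/gqlparser/ast"
	"github.com/vektah/gqlparser/parser"
)

func main() {
	doc, err := parser.ParseQuery(&ast.Source{
		Name:  "example.graphql",
		Input: `{ hero { name } }`,
	})
	if err != nil {
		panic(err)
	}
	// One anonymous operation of type "query" with a single top-level field.
	fmt.Println(len(doc.Operations), doc.Operations[0].Operation) // 1 query
}
```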
diff --git a/vendor/github.com/vektah/gqlparser/parser/query_test.yml b/vendor/github.com/vektah/gqlparser/parser/query_test.yml
deleted file mode 100644
index 902bb15f..00000000
--- a/vendor/github.com/vektah/gqlparser/parser/query_test.yml
+++ /dev/null
@@ -1,520 +0,0 @@
-parser provides useful errors:
- - name: unclosed paren
- input: '{'
- error:
- message: "Expected Name, found <EOF>"
- locations: [{line: 1, column: 2}]
-
- - name: missing on in fragment
- input: |
- { ...MissingOn }
- fragment MissingOn Type
- error:
- message: 'Expected "on", found Name "Type"'
- locations: [{ line: 2, column: 20 }]
-
- - name: missing name after alias
- input: '{ field: {} }'
- error:
- message: "Expected Name, found {"
- locations: [{ line: 1, column: 10 }]
-
- - name: not an operation
- input: 'notanoperation Foo { field }'
- error:
- message: 'Unexpected Name "notanoperation"'
- locations: [{ line: 1, column: 1 }]
-
- - name: a wild splat appears
- input: '...'
- error:
- message: 'Unexpected ...'
- locations: [{ line: 1, column: 1}]
-
-variables:
- - name: are allowed in args
- input: '{ field(complex: { a: { b: [ $var ] } }) }'
-
- - name: are not allowed in default args
- input: 'query Foo($x: Complex = { a: { b: [ $var ] } }) { field }'
- error:
- message: 'Unexpected $'
- locations: [{ line: 1, column: 37 }]
-
-fragments:
- - name: can not be named 'on'
- input: 'fragment on on on { on }'
- error:
- message: 'Unexpected Name "on"'
- locations: [{ line: 1, column: 10 }]
-
- - name: can not spread fragments called 'on'
- input: '{ ...on }'
- error:
- message: 'Expected Name, found }'
- locations: [{ line: 1, column: 9 }]
-
-encoding:
- - name: multibyte characters are supported
- input: |
- # This comment has a ਊ multi-byte character.
- { field(arg: "Has a ਊ multi-byte character.") }
- ast: |
- <QueryDocument>
- Operations: [OperationDefinition]
- - <OperationDefinition>
- Operation: Operation("query")
- SelectionSet: [Selection]
- - <Field>
- Alias: "field"
- Name: "field"
- Arguments: [Argument]
- - <Argument>
- Name: "arg"
- Value: "Has a ਊ multi-byte character."
-
-keywords are allowed anywhere a name is:
- - name: on
- input: |
- query on {
- ... a
- ... on on { field }
- }
- fragment a on Type {
- on(on: $on)
- @on(on: on)
- }
-
- - name: subscription
- input: |
- query subscription {
- ... subscription
- ... on subscription { field }
- }
- fragment subscription on Type {
- subscription(subscription: $subscription)
- @subscription(subscription: subscription)
- }
-
- - name: true
- input: |
- query true {
- ... true
- ... on true { field }
- }
- fragment true on Type {
- true(true: $true)
- @true(true: true)
- }
-
-operations:
- - name: anonymous mutation
- input: 'mutation { mutationField }'
-
- - name: named mutation
- input: 'mutation Foo { mutationField }'
-
- - name: anonymous subscription
- input: 'subscription { subscriptionField }'
-
- - name: named subscription
- input: 'subscription Foo { subscriptionField }'
-
-
-ast:
- - name: simple query
- input: |
- {
- node(id: 4) {
- id,
- name
- }
- }
- ast: |
- <QueryDocument>
- Operations: [OperationDefinition]
- - <OperationDefinition>
- Operation: Operation("query")
- SelectionSet: [Selection]
- - <Field>
- Alias: "node"
- Name: "node"
- Arguments: [Argument]
- - <Argument>
- Name: "id"
- Value: 4
- SelectionSet: [Selection]
- - <Field>
- Alias: "id"
- Name: "id"
- - <Field>
- Alias: "name"
- Name: "name"
-
- - name: nameless query with no variables
- input: |
- query {
- node {
- id
- }
- }
- ast: |
- <QueryDocument>
- Operations: [OperationDefinition]
- - <OperationDefinition>
- Operation: Operation("query")
- SelectionSet: [Selection]
- - <Field>
- Alias: "node"
- Name: "node"
- SelectionSet: [Selection]
- - <Field>
- Alias: "id"
- Name: "id"
-
- - name: fragment defined variables
- input: 'fragment a($v: Boolean = false) on t { f(v: $v) }'
- ast: |
- <QueryDocument>
- Fragments: [FragmentDefinition]
- - <FragmentDefinition>
- Name: "a"
- VariableDefinition: [VariableDefinition]
- - <VariableDefinition>
- Variable: "v"
- Type: Boolean
- DefaultValue: false
- TypeCondition: "t"
- SelectionSet: [Selection]
- - <Field>
- Alias: "f"
- Name: "f"
- Arguments: [Argument]
- - <Argument>
- Name: "v"
- Value: $v
-
-
-values:
- - name: null
- input: '{ f(id: null) }'
- ast: |
- <QueryDocument>
- Operations: [OperationDefinition]
- - <OperationDefinition>
- Operation: Operation("query")
- SelectionSet: [Selection]
- - <Field>
- Alias: "f"
- Name: "f"
- Arguments: [Argument]
- - <Argument>
- Name: "id"
- Value: null
-
- - name: strings
- input: '{ f(long: """long""", short: "short") } '
- ast: |
- <QueryDocument>
- Operations: [OperationDefinition]
- - <OperationDefinition>
- Operation: Operation("query")
- SelectionSet: [Selection]
- - <Field>
- Alias: "f"
- Name: "f"
- Arguments: [Argument]
- - <Argument>
- Name: "long"
- Value: "long"
- - <Argument>
- Name: "short"
- Value: "short"
-
- - name: list
- input: '{ f(id: [1,2]) }'
- ast: |
- <QueryDocument>
- Operations: [OperationDefinition]
- - <OperationDefinition>
- Operation: Operation("query")
- SelectionSet: [Selection]
- - <Field>
- Alias: "f"
- Name: "f"
- Arguments: [Argument]
- - <Argument>
- Name: "id"
- Value: [1,2]
-
-types:
- - name: common types
- input: 'query ($string: String, $int: Int, $arr: [Arr], $notnull: [Arr!]!) { f }'
- ast: |
- <QueryDocument>
- Operations: [OperationDefinition]
- - <OperationDefinition>
- Operation: Operation("query")
- VariableDefinitions: [VariableDefinition]
- - <VariableDefinition>
- Variable: "string"
- Type: String
- - <VariableDefinition>
- Variable: "int"
- Type: Int
- - <VariableDefinition>
- Variable: "arr"
- Type: [Arr]
- - <VariableDefinition>
- Variable: "notnull"
- Type: [Arr!]!
- SelectionSet: [Selection]
- - <Field>
- Alias: "f"
- Name: "f"
-
-large queries:
- - name: kitchen sink
- input: |
- # Copyright (c) 2015-present, Facebook, Inc.
- #
- # This source code is licensed under the MIT license found in the
- # LICENSE file in the root directory of this source tree.
-
- query queryName($foo: ComplexType, $site: Site = MOBILE) {
- whoever123is: node(id: [123, 456]) {
- id ,
- ... on User @defer {
- field2 {
- id ,
- alias: field1(first:10, after:$foo,) @include(if: $foo) {
- id,
- ...frag
- }
- }
- }
- ... @skip(unless: $foo) {
- id
- }
- ... {
- id
- }
- }
- }
-
- mutation likeStory {
- like(story: 123) @defer {
- story {
- id
- }
- }
- }
-
- subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) {
- storyLikeSubscribe(input: $input) {
- story {
- likers {
- count
- }
- likeSentence {
- text
- }
- }
- }
- }
-
- fragment frag on Friend {
- foo(size: $size, bar: $b, obj: {key: "value", block: """
- block string uses \"""
- """})
- }
-
- {
- unnamed(truthy: true, falsey: false, nullish: null),
- query
- }
- ast: |
- <QueryDocument>
- Operations: [OperationDefinition]
- - <OperationDefinition>
- Operation: Operation("query")
- Name: "queryName"
- VariableDefinitions: [VariableDefinition]
- - <VariableDefinition>
- Variable: "foo"
- Type: ComplexType
- - <VariableDefinition>
- Variable: "site"
- Type: Site
- DefaultValue: MOBILE
- SelectionSet: [Selection]
- - <Field>
- Alias: "whoever123is"
- Name: "node"
- Arguments: [Argument]
- - <Argument>
- Name: "id"
- Value: [123,456]
- SelectionSet: [Selection]
- - <Field>
- Alias: "id"
- Name: "id"
- - <InlineFragment>
- TypeCondition: "User"
- Directives: [Directive]
- - <Directive>
- Name: "defer"
- SelectionSet: [Selection]
- - <Field>
- Alias: "field2"
- Name: "field2"
- SelectionSet: [Selection]
- - <Field>
- Alias: "id"
- Name: "id"
- - <Field>
- Alias: "alias"
- Name: "field1"
- Arguments: [Argument]
- - <Argument>
- Name: "first"
- Value: 10
- - <Argument>
- Name: "after"
- Value: $foo
- Directives: [Directive]
- - <Directive>
- Name: "include"
- Arguments: [Argument]
- - <Argument>
- Name: "if"
- Value: $foo
- SelectionSet: [Selection]
- - <Field>
- Alias: "id"
- Name: "id"
- - <FragmentSpread>
- Name: "frag"
- - <InlineFragment>
- Directives: [Directive]
- - <Directive>
- Name: "skip"
- Arguments: [Argument]
- - <Argument>
- Name: "unless"
- Value: $foo
- SelectionSet: [Selection]
- - <Field>
- Alias: "id"
- Name: "id"
- - <InlineFragment>
- SelectionSet: [Selection]
- - <Field>
- Alias: "id"
- Name: "id"
- - <OperationDefinition>
- Operation: Operation("mutation")
- Name: "likeStory"
- SelectionSet: [Selection]
- - <Field>
- Alias: "like"
- Name: "like"
- Arguments: [Argument]
- - <Argument>
- Name: "story"
- Value: 123
- Directives: [Directive]
- - <Directive>
- Name: "defer"
- SelectionSet: [Selection]
- - <Field>
- Alias: "story"
- Name: "story"
- SelectionSet: [Selection]
- - <Field>
- Alias: "id"
- Name: "id"
- - <OperationDefinition>
- Operation: Operation("subscription")
- Name: "StoryLikeSubscription"
- VariableDefinitions: [VariableDefinition]
- - <VariableDefinition>
- Variable: "input"
- Type: StoryLikeSubscribeInput
- SelectionSet: [Selection]
- - <Field>
- Alias: "storyLikeSubscribe"
- Name: "storyLikeSubscribe"
- Arguments: [Argument]
- - <Argument>
- Name: "input"
- Value: $input
- SelectionSet: [Selection]
- - <Field>
- Alias: "story"
- Name: "story"
- SelectionSet: [Selection]
- - <Field>
- Alias: "likers"
- Name: "likers"
- SelectionSet: [Selection]
- - <Field>
- Alias: "count"
- Name: "count"
- - <Field>
- Alias: "likeSentence"
- Name: "likeSentence"
- SelectionSet: [Selection]
- - <Field>
- Alias: "text"
- Name: "text"
- - <OperationDefinition>
- Operation: Operation("query")
- SelectionSet: [Selection]
- - <Field>
- Alias: "unnamed"
- Name: "unnamed"
- Arguments: [Argument]
- - <Argument>
- Name: "truthy"
- Value: true
- - <Argument>
- Name: "falsey"
- Value: false
- - <Argument>
- Name: "nullish"
- Value: null
- - <Field>
- Alias: "query"
- Name: "query"
- Fragments: [FragmentDefinition]
- - <FragmentDefinition>
- Name: "frag"
- TypeCondition: "Friend"
- SelectionSet: [Selection]
- - <Field>
- Alias: "foo"
- Name: "foo"
- Arguments: [Argument]
- - <Argument>
- Name: "size"
- Value: $size
- - <Argument>
- Name: "bar"
- Value: $b
- - <Argument>
- Name: "obj"
- Value: {key:"value",block:"block string uses \"\"\""}
-
-fuzzer:
-- name: 01
- input: '{__typename{...}}'
- error:
- message: 'Expected {, found }'
- locations: [{ line: 1, column: 16 }]
-
-- name: 02
- input: '{...{__typename{...{}}}}'
- error:
- message: 'expected at least one definition, found }'
- locations: [{ line: 1, column: 21 }]
diff --git a/vendor/github.com/vektah/gqlparser/parser/schema.go b/vendor/github.com/vektah/gqlparser/parser/schema.go
deleted file mode 100644
index 5689e433..00000000
--- a/vendor/github.com/vektah/gqlparser/parser/schema.go
+++ /dev/null
@@ -1,527 +0,0 @@
-package parser
-
-import (
- . "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
- "github.com/vektah/gqlparser/lexer"
-)
-
-func ParseSchema(source *Source) (*SchemaDocument, *gqlerror.Error) {
- p := parser{
- lexer: lexer.New(source),
- }
- ast, err := p.parseSchemaDocument(), p.err
- if err != nil {
- return nil, err
- }
-
- for _, def := range ast.Definitions {
- def.BuiltIn = source.BuiltIn
- }
- for _, def := range ast.Extensions {
- def.BuiltIn = source.BuiltIn
- }
-
- return ast, nil
-}
-
-func ParseSchemas(inputs ...*Source) (*SchemaDocument, *gqlerror.Error) {
- ast := &SchemaDocument{}
- for _, input := range inputs {
- inputAst, err := ParseSchema(input)
- if err != nil {
- return nil, err
- }
- ast.Merge(inputAst)
- }
- return ast, nil
-}
-
-func (p *parser) parseSchemaDocument() *SchemaDocument {
- var doc SchemaDocument
- doc.Position = p.peekPos()
- for p.peek().Kind != lexer.EOF {
- if p.err != nil {
- return nil
- }
-
- var description string
- if p.peek().Kind == lexer.BlockString || p.peek().Kind == lexer.String {
- description = p.parseDescription()
- }
-
- if p.peek().Kind != lexer.Name {
- p.unexpectedError()
- break
- }
-
- switch p.peek().Value {
- case "scalar", "type", "interface", "union", "enum", "input":
- doc.Definitions = append(doc.Definitions, p.parseTypeSystemDefinition(description))
- case "schema":
- doc.Schema = append(doc.Schema, p.parseSchemaDefinition(description))
- case "directive":
- doc.Directives = append(doc.Directives, p.parseDirectiveDefinition(description))
- case "extend":
- if description != "" {
- p.unexpectedToken(p.prev)
- }
- p.parseTypeSystemExtension(&doc)
- default:
- p.unexpectedError()
- return nil
- }
- }
-
- return &doc
-}
-
-func (p *parser) parseDescription() string {
- token := p.peek()
-
- if token.Kind != lexer.BlockString && token.Kind != lexer.String {
- return ""
- }
-
- return p.next().Value
-}
-
-func (p *parser) parseTypeSystemDefinition(description string) *Definition {
- tok := p.peek()
- if tok.Kind != lexer.Name {
- p.unexpectedError()
- return nil
- }
-
- switch tok.Value {
- case "scalar":
- return p.parseScalarTypeDefinition(description)
- case "type":
- return p.parseObjectTypeDefinition(description)
- case "interface":
- return p.parseInterfaceTypeDefinition(description)
- case "union":
- return p.parseUnionTypeDefinition(description)
- case "enum":
- return p.parseEnumTypeDefinition(description)
- case "input":
- return p.parseInputObjectTypeDefinition(description)
- default:
- p.unexpectedError()
- return nil
- }
-}
-
-func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition {
- p.expectKeyword("schema")
-
- def := SchemaDefinition{Description: description}
- def.Position = p.peekPos()
- def.Description = description
- def.Directives = p.parseDirectives(true)
-
- p.some(lexer.BraceL, lexer.BraceR, func() {
- def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition())
- })
- return &def
-}
-
-func (p *parser) parseOperationTypeDefinition() *OperationTypeDefinition {
- var op OperationTypeDefinition
- op.Position = p.peekPos()
- op.Operation = p.parseOperationType()
- p.expect(lexer.Colon)
- op.Type = p.parseName()
- return &op
-}
-
-func (p *parser) parseScalarTypeDefinition(description string) *Definition {
- p.expectKeyword("scalar")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Scalar
- def.Description = description
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- return &def
-}
-
-func (p *parser) parseObjectTypeDefinition(description string) *Definition {
- p.expectKeyword("type")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Object
- def.Description = description
- def.Name = p.parseName()
- def.Interfaces = p.parseImplementsInterfaces()
- def.Directives = p.parseDirectives(true)
- def.Fields = p.parseFieldsDefinition()
- return &def
-}
-
-func (p *parser) parseImplementsInterfaces() []string {
- var types []string
- if p.peek().Value == "implements" {
- p.next()
- // optional leading ampersand
- p.skip(lexer.Amp)
-
- types = append(types, p.parseName())
- for p.skip(lexer.Amp) && p.err == nil {
- types = append(types, p.parseName())
- }
- }
- return types
-}
-
-func (p *parser) parseFieldsDefinition() FieldList {
- var defs FieldList
- p.some(lexer.BraceL, lexer.BraceR, func() {
- defs = append(defs, p.parseFieldDefinition())
- })
- return defs
-}
-
-func (p *parser) parseFieldDefinition() *FieldDefinition {
- var def FieldDefinition
- def.Position = p.peekPos()
- def.Description = p.parseDescription()
- def.Name = p.parseName()
- def.Arguments = p.parseArgumentDefs()
- p.expect(lexer.Colon)
- def.Type = p.parseTypeReference()
- def.Directives = p.parseDirectives(true)
-
- return &def
-}
-
-func (p *parser) parseArgumentDefs() ArgumentDefinitionList {
- var args ArgumentDefinitionList
- p.some(lexer.ParenL, lexer.ParenR, func() {
- args = append(args, p.parseArgumentDef())
- })
- return args
-}
-
-func (p *parser) parseArgumentDef() *ArgumentDefinition {
- var def ArgumentDefinition
- def.Position = p.peekPos()
- def.Description = p.parseDescription()
- def.Name = p.parseName()
- p.expect(lexer.Colon)
- def.Type = p.parseTypeReference()
- if p.skip(lexer.Equals) {
- def.DefaultValue = p.parseValueLiteral(true)
- }
- def.Directives = p.parseDirectives(true)
- return &def
-}
-
-func (p *parser) parseInputValueDef() *FieldDefinition {
- var def FieldDefinition
- def.Position = p.peekPos()
- def.Description = p.parseDescription()
- def.Name = p.parseName()
- p.expect(lexer.Colon)
- def.Type = p.parseTypeReference()
- if p.skip(lexer.Equals) {
- def.DefaultValue = p.parseValueLiteral(true)
- }
- def.Directives = p.parseDirectives(true)
- return &def
-}
-
-func (p *parser) parseInterfaceTypeDefinition(description string) *Definition {
- p.expectKeyword("interface")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Interface
- def.Description = description
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- def.Fields = p.parseFieldsDefinition()
- return &def
-}
-
-func (p *parser) parseUnionTypeDefinition(description string) *Definition {
- p.expectKeyword("union")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Union
- def.Description = description
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- def.Types = p.parseUnionMemberTypes()
- return &def
-}
-
-func (p *parser) parseUnionMemberTypes() []string {
- var types []string
- if p.skip(lexer.Equals) {
- // optional leading pipe
- p.skip(lexer.Pipe)
-
- types = append(types, p.parseName())
- for p.skip(lexer.Pipe) && p.err == nil {
- types = append(types, p.parseName())
- }
- }
- return types
-}
-
-func (p *parser) parseEnumTypeDefinition(description string) *Definition {
- p.expectKeyword("enum")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Enum
- def.Description = description
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- def.EnumValues = p.parseEnumValuesDefinition()
- return &def
-}
-
-func (p *parser) parseEnumValuesDefinition() EnumValueList {
- var values EnumValueList
- p.some(lexer.BraceL, lexer.BraceR, func() {
- values = append(values, p.parseEnumValueDefinition())
- })
- return values
-}
-
-func (p *parser) parseEnumValueDefinition() *EnumValueDefinition {
- return &EnumValueDefinition{
- Position: p.peekPos(),
- Description: p.parseDescription(),
- Name: p.parseName(),
- Directives: p.parseDirectives(true),
- }
-}
-
-func (p *parser) parseInputObjectTypeDefinition(description string) *Definition {
- p.expectKeyword("input")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = InputObject
- def.Description = description
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- def.Fields = p.parseInputFieldsDefinition()
- return &def
-}
-
-func (p *parser) parseInputFieldsDefinition() FieldList {
- var values FieldList
- p.some(lexer.BraceL, lexer.BraceR, func() {
- values = append(values, p.parseInputValueDef())
- })
- return values
-}
-
-func (p *parser) parseTypeSystemExtension(doc *SchemaDocument) {
- p.expectKeyword("extend")
-
- switch p.peek().Value {
- case "schema":
- doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension())
- case "scalar":
- doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension())
- case "type":
- doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension())
- case "interface":
- doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension())
- case "union":
- doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension())
- case "enum":
- doc.Extensions = append(doc.Extensions, p.parseEnumTypeExtension())
- case "input":
- doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension())
- default:
- p.unexpectedError()
- }
-}
-
-func (p *parser) parseSchemaExtension() *SchemaDefinition {
- p.expectKeyword("schema")
-
- var def SchemaDefinition
- def.Position = p.peekPos()
- def.Directives = p.parseDirectives(true)
- p.some(lexer.BraceL, lexer.BraceR, func() {
- def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition())
- })
- if len(def.Directives) == 0 && len(def.OperationTypes) == 0 {
- p.unexpectedError()
- }
- return &def
-}
-
-func (p *parser) parseScalarTypeExtension() *Definition {
- p.expectKeyword("scalar")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Scalar
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- if len(def.Directives) == 0 {
- p.unexpectedError()
- }
- return &def
-}
-
-func (p *parser) parseObjectTypeExtension() *Definition {
- p.expectKeyword("type")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Object
- def.Name = p.parseName()
- def.Interfaces = p.parseImplementsInterfaces()
- def.Directives = p.parseDirectives(true)
- def.Fields = p.parseFieldsDefinition()
- if len(def.Interfaces) == 0 && len(def.Directives) == 0 && len(def.Fields) == 0 {
- p.unexpectedError()
- }
- return &def
-}
-
-func (p *parser) parseInterfaceTypeExtension() *Definition {
- p.expectKeyword("interface")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Interface
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- def.Fields = p.parseFieldsDefinition()
- if len(def.Directives) == 0 && len(def.Fields) == 0 {
- p.unexpectedError()
- }
- return &def
-}
-
-func (p *parser) parseUnionTypeExtension() *Definition {
- p.expectKeyword("union")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Union
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- def.Types = p.parseUnionMemberTypes()
-
- if len(def.Directives) == 0 && len(def.Types) == 0 {
- p.unexpectedError()
- }
- return &def
-}
-
-func (p *parser) parseEnumTypeExtension() *Definition {
- p.expectKeyword("enum")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = Enum
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(true)
- def.EnumValues = p.parseEnumValuesDefinition()
- if len(def.Directives) == 0 && len(def.EnumValues) == 0 {
- p.unexpectedError()
- }
- return &def
-}
-
-func (p *parser) parseInputObjectTypeExtension() *Definition {
- p.expectKeyword("input")
-
- var def Definition
- def.Position = p.peekPos()
- def.Kind = InputObject
- def.Name = p.parseName()
- def.Directives = p.parseDirectives(false)
- def.Fields = p.parseInputFieldsDefinition()
- if len(def.Directives) == 0 && len(def.Fields) == 0 {
- p.unexpectedError()
- }
- return &def
-}
-
-func (p *parser) parseDirectiveDefinition(description string) *DirectiveDefinition {
- p.expectKeyword("directive")
- p.expect(lexer.At)
-
- var def DirectiveDefinition
- def.Position = p.peekPos()
- def.Description = description
- def.Name = p.parseName()
- def.Arguments = p.parseArgumentDefs()
-
- p.expectKeyword("on")
- def.Locations = p.parseDirectiveLocations()
- return &def
-}
-
-func (p *parser) parseDirectiveLocations() []DirectiveLocation {
- p.skip(lexer.Pipe)
-
- locations := []DirectiveLocation{p.parseDirectiveLocation()}
-
- for p.skip(lexer.Pipe) && p.err == nil {
- locations = append(locations, p.parseDirectiveLocation())
- }
-
- return locations
-}
-
-func (p *parser) parseDirectiveLocation() DirectiveLocation {
- name := p.expect(lexer.Name)
-
- switch name.Value {
- case `QUERY`:
- return LocationQuery
- case `MUTATION`:
- return LocationMutation
- case `SUBSCRIPTION`:
- return LocationSubscription
- case `FIELD`:
- return LocationField
- case `FRAGMENT_DEFINITION`:
- return LocationFragmentDefinition
- case `FRAGMENT_SPREAD`:
- return LocationFragmentSpread
- case `INLINE_FRAGMENT`:
- return LocationInlineFragment
- case `SCHEMA`:
- return LocationSchema
- case `SCALAR`:
- return LocationScalar
- case `OBJECT`:
- return LocationObject
- case `FIELD_DEFINITION`:
- return LocationFieldDefinition
- case `ARGUMENT_DEFINITION`:
- return LocationArgumentDefinition
- case `INTERFACE`:
- return LocationInterface
- case `UNION`:
- return LocationUnion
- case `ENUM`:
- return LocationEnum
- case `ENUM_VALUE`:
- return LocationEnumValue
- case `INPUT_OBJECT`:
- return LocationInputObject
- case `INPUT_FIELD_DEFINITION`:
- return LocationInputFieldDefinition
- }
-
- p.unexpectedToken(name)
- return ""
-}
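ParseSchema and ParseSchemas above are the SDL entry points; ParseSchemas simply merges the documents produced for each source. A minimal sketch of parsing a single schema source, under the same import-path assumption as the previous example:

```go
package main

import (
	"fmt"

	"github.com/vektah/gqlparser/ast"
	"github.com/vektah/gqlparser/parser"
)

func main() {
	doc, err := parser.ParseSchema(&ast.Source{
		Name:  "example.graphql",
		Input: `type Query { hero: String }`,
	})
	if err != nil {
		panic(err)
	}
	def := doc.Definitions[0]
	fmt.Println(def.Kind, def.Name, def.Fields[0].Name) // OBJECT Query hero
}
```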
diff --git a/vendor/github.com/vektah/gqlparser/parser/schema_test.yml b/vendor/github.com/vektah/gqlparser/parser/schema_test.yml
deleted file mode 100644
index 394bd363..00000000
--- a/vendor/github.com/vektah/gqlparser/parser/schema_test.yml
+++ /dev/null
@@ -1,540 +0,0 @@
-object types:
- - name: simple
- input: |
- type Hello {
- world: String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Type: String
-
- - name: with description
- input: |
- "Description"
- type Hello {
- world: String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Description: "Description"
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Type: String
-
- - name: with block description
- input: |
- """
- Description
- """
- # Even with comments between them
- type Hello {
- world: String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Description: "Description"
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Type: String
- - name: with field arg
- input: |
- type Hello {
- world(flag: Boolean): String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Arguments: [ArgumentDefinition]
- - <ArgumentDefinition>
- Name: "flag"
- Type: Boolean
- Type: String
-
- - name: with field arg and default value
- input: |
- type Hello {
- world(flag: Boolean = true): String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Arguments: [ArgumentDefinition]
- - <ArgumentDefinition>
- Name: "flag"
- DefaultValue: true
- Type: Boolean
- Type: String
-
- - name: with field list arg
- input: |
- type Hello {
- world(things: [String]): String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Arguments: [ArgumentDefinition]
- - <ArgumentDefinition>
- Name: "things"
- Type: [String]
- Type: String
-
- - name: with two args
- input: |
- type Hello {
- world(argOne: Boolean, argTwo: Int): String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Arguments: [ArgumentDefinition]
- - <ArgumentDefinition>
- Name: "argOne"
- Type: Boolean
- - <ArgumentDefinition>
- Name: "argTwo"
- Type: Int
- Type: String
- - name: must define one or more fields
- input: |
- type Hello {}
- error:
- message: "expected at least one definition, found }"
- locations: [{ line: 1, column: 13 }]
-
-type extensions:
- - name: Object extension
- input: |
- extend type Hello {
- world: String
- }
- ast: |
- <SchemaDocument>
- Extensions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Type: String
-
- - name: without any fields
- input: "extend type Hello implements Greeting"
- ast: |
- <SchemaDocument>
- Extensions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Interfaces: [string]
- - "Greeting"
-
- - name: without fields twice
- input: |
- extend type Hello implements Greeting
- extend type Hello implements SecondGreeting
- ast: |
- <SchemaDocument>
- Extensions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Interfaces: [string]
- - "Greeting"
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Interfaces: [string]
- - "SecondGreeting"
-
- - name: without anything errors
- input: "extend type Hello"
- error:
- message: "Unexpected <EOF>"
- locations: [{ line: 1, column: 18 }]
-
- - name: can have descriptions # hmm, this might not be spec compliant...
- input: |
- "Description"
- extend type Hello {
- world: String
- }
- error:
- message: 'Unexpected String "Description"'
- locations: [{ line: 1, column: 2 }]
-
- - name: can not have descriptions on types
- input: |
- extend "Description" type Hello {
- world: String
- }
- error:
- message: Unexpected String "Description"
- locations: [{ line: 1, column: 9 }]
-
-schema definition:
- - name: simple
- input: |
- schema {
- query: Query
- }
- ast: |
- <SchemaDocument>
- Schema: [SchemaDefinition]
- - <SchemaDefinition>
- OperationTypes: [OperationTypeDefinition]
- - <OperationTypeDefinition>
- Operation: Operation("query")
- Type: "Query"
-
-schema extensions:
- - name: simple
- input: |
- extend schema {
- mutation: Mutation
- }
- ast: |
- <SchemaDocument>
- SchemaExtension: [SchemaDefinition]
- - <SchemaDefinition>
- OperationTypes: [OperationTypeDefinition]
- - <OperationTypeDefinition>
- Operation: Operation("mutation")
- Type: "Mutation"
-
- - name: directive only
- input: "extend schema @directive"
- ast: |
- <SchemaDocument>
- SchemaExtension: [SchemaDefinition]
- - <SchemaDefinition>
- Directives: [Directive]
- - <Directive>
- Name: "directive"
-
- - name: without anything errors
- input: "extend schema"
- error:
- message: "Unexpected <EOF>"
- locations: [{ line: 1, column: 14}]
-
-type extensions:
- - name: all can have directives
- input: |
- extend scalar Foo @deprecated
- extend type Foo @deprecated
- extend interface Foo @deprecated
- extend union Foo @deprecated
- extend enum Foo @deprecated
- extend input Foo @deprecated
- ast: |
- <SchemaDocument>
- Extensions: [Definition]
- - <Definition>
- Kind: DefinitionKind("SCALAR")
- Name: "Foo"
- Directives: [Directive]
- - <Directive>
- Name: "deprecated"
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Foo"
- Directives: [Directive]
- - <Directive>
- Name: "deprecated"
- - <Definition>
- Kind: DefinitionKind("INTERFACE")
- Name: "Foo"
- Directives: [Directive]
- - <Directive>
- Name: "deprecated"
- - <Definition>
- Kind: DefinitionKind("UNION")
- Name: "Foo"
- Directives: [Directive]
- - <Directive>
- Name: "deprecated"
- - <Definition>
- Kind: DefinitionKind("ENUM")
- Name: "Foo"
- Directives: [Directive]
- - <Directive>
- Name: "deprecated"
- - <Definition>
- Kind: DefinitionKind("INPUT_OBJECT")
- Name: "Foo"
- Directives: [Directive]
- - <Directive>
- Name: "deprecated"
-
-
-inheritance:
- - name: single
- input: "type Hello implements World { field: String }"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Interfaces: [string]
- - "World"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "field"
- Type: String
-
- - name: multi
- input: "type Hello implements Wo & rld { field: String }"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Interfaces: [string]
- - "Wo"
- - "rld"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "field"
- Type: String
-
- - name: multi with leading amp
- input: "type Hello implements & Wo & rld { field: String }"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("OBJECT")
- Name: "Hello"
- Interfaces: [string]
- - "Wo"
- - "rld"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "field"
- Type: String
-
-enums:
- - name: single value
- input: "enum Hello { WORLD }"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("ENUM")
- Name: "Hello"
- EnumValues: [EnumValueDefinition]
- - <EnumValueDefinition>
- Name: "WORLD"
-
- - name: double value
- input: "enum Hello { WO, RLD }"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("ENUM")
- Name: "Hello"
- EnumValues: [EnumValueDefinition]
- - <EnumValueDefinition>
- Name: "WO"
- - <EnumValueDefinition>
- Name: "RLD"
- - name: must define one or more unique enum values
- input: |
- enum Hello {}
- error:
- message: "expected at least one definition, found }"
- locations: [{ line: 1, column: 13 }]
-
-interface:
- - name: simple
- input: |
- interface Hello {
- world: String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("INTERFACE")
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Type: String
- - name: must define one or more fields
- input: |
- interface Hello {}
- error:
- message: "expected at least one definition, found }"
- locations: [{ line: 1, column: 18 }]
-
-unions:
- - name: simple
- input: "union Hello = World"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("UNION")
- Name: "Hello"
- Types: [string]
- - "World"
-
- - name: with two types
- input: "union Hello = Wo | Rld"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("UNION")
- Name: "Hello"
- Types: [string]
- - "Wo"
- - "Rld"
-
- - name: with leading pipe
- input: "union Hello = | Wo | Rld"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("UNION")
- Name: "Hello"
- Types: [string]
- - "Wo"
- - "Rld"
-
- - name: cant be empty
- input: "union Hello = || Wo | Rld"
- error:
- message: "Expected Name, found |"
- locations: [{ line: 1, column: 16 }]
-
- - name: cant double pipe
- input: "union Hello = Wo || Rld"
- error:
- message: "Expected Name, found |"
- locations: [{ line: 1, column: 19 }]
-
- - name: cant have trailing pipe
- input: "union Hello = | Wo | Rld |"
- error:
- message: "Expected Name, found <EOF>"
- locations: [{ line: 1, column: 27 }]
-
-scalar:
- - name: simple
- input: "scalar Hello"
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("SCALAR")
- Name: "Hello"
-
-input object:
- - name: simple
- input: |
- input Hello {
- world: String
- }
- ast: |
- <SchemaDocument>
- Definitions: [Definition]
- - <Definition>
- Kind: DefinitionKind("INPUT_OBJECT")
- Name: "Hello"
- Fields: [FieldDefinition]
- - <FieldDefinition>
- Name: "world"
- Type: String
-
- - name: can not have args
- input: |
- input Hello {
- world(foo: Int): String
- }
- error:
- message: "Expected :, found ("
- locations: [{ line: 2, column: 8 }]
- - name: must define one or more input fields
- input: |
- input Hello {}
- error:
- message: "expected at least one definition, found }"
- locations: [{ line: 1, column: 14 }]
-
-directives:
- - name: simple
- input: directive @foo on FIELD
- ast: |
- <SchemaDocument>
- Directives: [DirectiveDefinition]
- - <DirectiveDefinition>
- Name: "foo"
- Locations: [DirectiveLocation]
- - DirectiveLocation("FIELD")
-
- - name: invalid location
- input: "directive @foo on FIELD | INCORRECT_LOCATION"
- error:
- message: 'Unexpected Name "INCORRECT_LOCATION"'
- locations: [{ line: 1, column: 27 }]
-
-fuzzer:
- - name: 1
- input: "type o{d(g:["
- error:
- message: 'Expected Name, found <EOF>'
- locations: [{ line: 1, column: 13 }]
- - name: 2
- input: "\"\"\"\r"
- error:
- message: 'Unexpected <Invalid>'
- locations: [{ line: 1, column: 5 }]
diff --git a/vendor/github.com/vektah/gqlparser/readme.md b/vendor/github.com/vektah/gqlparser/readme.md
deleted file mode 100644
index 976d202b..00000000
--- a/vendor/github.com/vektah/gqlparser/readme.md
+++ /dev/null
@@ -1,17 +0,0 @@
-gqlparser [![CircleCI](https://badgen.net/circleci/github/vektah/gqlparser/master)](https://circleci.com/gh/vektah/gqlparser) [![Go Report Card](https://goreportcard.com/badge/github.com/vektah/gqlparser)](https://goreportcard.com/report/github.com/vektah/gqlparser) [![Coverage Status](https://badgen.net/coveralls/c/github/vektah/gqlparser)](https://coveralls.io/github/vektah/gqlparser?branch=master)
-===
-
-This is a parser for GraphQL, written to mirror the graphql-js reference implementation as closely as possible while remaining idiomatic and easy to use.
-
-spec target: June 2018 (Schema definition language, block strings as descriptions, error paths & extension)
-
-This parser is used by [gqlgen](https://github.com/99designs/gqlgen), and it should be reasonably stable.
-
-Guiding principles:
-
- - maintainability: It should be easy to stay up to date with the spec
- - well tested: It shouldn't need a GraphQL server to validate itself. Changes to this repo should be self-contained.
- - server agnostic: It should be usable by any of the GraphQL server implementations, and any GraphQL client tooling.
- - idiomatic & stable API: It should follow Go best practices, especially around forwards compatibility.
- - fast: Where it doesn't impact the above, it should be fast. Avoid unnecessary allocs in hot paths.
- - close to reference: Where it doesn't impact the above, it should stay close to the [graphql/graphql-js](https://github.com/graphql/graphql-js) reference implementation.
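
A minimal usage sketch of the package removed above, assuming the package-level MustLoadSchema and LoadQuery helpers declared in gqlparser.go; the calls are an approximation for illustration, not the vendored API verbatim:

package main

import (
	"fmt"

	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
)

func main() {
	// Parse and validate a schema from an in-memory SDL source.
	schema := gqlparser.MustLoadSchema(&ast.Source{
		Name:  "schema.graphql",
		Input: "type Query { hello: String }",
	})

	// Parse a query and run the registered validation rules against the schema.
	doc, errs := gqlparser.LoadQuery(schema, "{ hello }")
	if len(errs) > 0 {
		fmt.Println(errs)
		return
	}
	fmt.Println(len(doc.Operations)) // 1
}
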
diff --git a/vendor/github.com/vektah/gqlparser/validator/error.go b/vendor/github.com/vektah/gqlparser/validator/error.go
deleted file mode 100644
index f354dee5..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/error.go
+++ /dev/null
@@ -1,55 +0,0 @@
-package validator
-
-import (
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-type ErrorOption func(err *gqlerror.Error)
-
-func Message(msg string, args ...interface{}) ErrorOption {
- return func(err *gqlerror.Error) {
- err.Message += fmt.Sprintf(msg, args...)
- }
-}
-
-func At(position *ast.Position) ErrorOption {
- return func(err *gqlerror.Error) {
- if position == nil {
- return
- }
- err.Locations = append(err.Locations, gqlerror.Location{
- Line: position.Line,
- Column: position.Column,
- })
- if position.Src.Name != "" {
- err.SetFile(position.Src.Name)
- }
- }
-}
-
-func SuggestListQuoted(prefix string, typed string, suggestions []string) ErrorOption {
- suggested := SuggestionList(typed, suggestions)
- return func(err *gqlerror.Error) {
- if len(suggested) > 0 {
- err.Message += " " + prefix + " " + QuotedOrList(suggested...) + "?"
- }
- }
-}
-
-func SuggestListUnquoted(prefix string, typed string, suggestions []string) ErrorOption {
- suggested := SuggestionList(typed, suggestions)
- return func(err *gqlerror.Error) {
- if len(suggested) > 0 {
- err.Message += " " + prefix + " " + OrList(suggested...) + "?"
- }
- }
-}
-
-func Suggestf(suggestion string, args ...interface{}) ErrorOption {
- return func(err *gqlerror.Error) {
- err.Message += " Did you mean " + fmt.Sprintf(suggestion, args...) + "?"
- }
-}
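
The functions in the file above are plain functional options over *gqlerror.Error. A short sketch of how they compose, assuming the validator applies each option in order (the AddErrFunc plumbing that does so lives elsewhere in the package):

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/gqlerror"
	"github.com/vektah/gqlparser/validator"
)

func main() {
	err := &gqlerror.Error{}
	opts := []validator.ErrorOption{
		validator.Message(`Unknown type "%s".`, "Strin"),
		validator.Suggestf(`"String"`),
	}
	for _, opt := range opts {
		opt(err) // each option appends to the message or annotates the error in place
	}
	fmt.Println(err.Message)
	// Unknown type "Strin". Did you mean "String"?
}
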
diff --git a/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE b/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE
deleted file mode 100644
index fce4519e..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE
+++ /dev/null
@@ -1,33 +0,0 @@
-The files in this testdata directory are derived from the graphql-js project:
-https://github.com/graphql/graphql-js
-
-BSD License
-
-For GraphQL software
-
-Copyright (c) 2015, Facebook, Inc. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
-
- * Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-
- * Neither the name Facebook nor the names of its contributors may be used to
- endorse or promote products derived from this software without specific
- prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file
diff --git a/vendor/github.com/vektah/gqlparser/validator/messaging.go b/vendor/github.com/vektah/gqlparser/validator/messaging.go
deleted file mode 100644
index f1ab5873..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/messaging.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package validator
-
-import "bytes"
-
-// Given [ A, B, C ] return '"A", "B", or "C"'.
-func QuotedOrList(items ...string) string {
- itemsQuoted := make([]string, len(items))
- for i, item := range items {
- itemsQuoted[i] = `"` + item + `"`
- }
- return OrList(itemsQuoted...)
-}
-
-// Given [ A, B, C ] return 'A, B, or C'.
-func OrList(items ...string) string {
- var buf bytes.Buffer
-
- if len(items) > 5 {
- items = items[:5]
- }
- if len(items) == 2 {
- buf.WriteString(items[0])
- buf.WriteString(" or ")
- buf.WriteString(items[1])
- return buf.String()
- }
-
- for i, item := range items {
- if i != 0 {
- if i == len(items)-1 {
- buf.WriteString(", or ")
- } else {
- buf.WriteString(", ")
- }
- }
- buf.WriteString(item)
- }
- return buf.String()
-}
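
Both helpers above are exported, so their behaviour is easy to sanity-check; a small sketch of the expected output given the two-item and Oxford-comma branches in OrList:

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/validator"
)

func main() {
	fmt.Println(validator.OrList("cat", "dog"))              // cat or dog
	fmt.Println(validator.OrList("red", "green", "blue"))    // red, green, or blue
	fmt.Println(validator.QuotedOrList("Query", "Mutation")) // "Query" or "Mutation"
}
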
diff --git a/vendor/github.com/vektah/gqlparser/validator/prelude.go b/vendor/github.com/vektah/gqlparser/validator/prelude.go
deleted file mode 100644
index c7a4d35b..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/prelude.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package validator
-
-import "github.com/vektah/gqlparser/ast"
-
-var Prelude = &ast.Source{
- Name: "prelude.graphql",
- Input: "# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema\n\n# The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.\nscalar Int\n\n# The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).\nscalar Float\n\n# The `String`scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.\nscalar String\n\n# The `Boolean` scalar type represents ` + \"`\" + `true` + \"`\" + ` or ` + \"`\" + `false` + \"`\" + `.\nscalar Boolean\n\n# The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as \"4\") or integer (such as 4) input value will be accepted as an ID.\nscalar ID\n\n# The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument.\ndirective @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n# The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument.\ndirective @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n# The @deprecated directive is used within the type system definition language to indicate deprecated portions of a GraphQL service’s schema, such as deprecated fields on a type or deprecated enum values.\ndirective @deprecated(reason: String = \"No longer supported\") on FIELD_DEFINITION | ENUM_VALUE\n\ntype __Schema {\n types: [__Type!]!\n queryType: __Type!\n mutationType: __Type\n subscriptionType: __Type\n directives: [__Directive!]!\n}\n\ntype __Type {\n kind: __TypeKind!\n name: String\n description: String\n\n # OBJECT and INTERFACE only\n fields(includeDeprecated: Boolean = false): [__Field!]\n\n # OBJECT only\n interfaces: [__Type!]\n\n # INTERFACE and UNION only\n possibleTypes: [__Type!]\n\n # ENUM only\n enumValues(includeDeprecated: Boolean = false): [__EnumValue!]\n\n # INPUT_OBJECT only\n inputFields: [__InputValue!]\n\n # NON_NULL and LIST only\n ofType: __Type\n}\n\ntype __Field {\n name: String!\n description: String\n args: [__InputValue!]!\n type: __Type!\n isDeprecated: Boolean!\n deprecationReason: String\n}\n\ntype __InputValue {\n name: String!\n description: String\n type: __Type!\n defaultValue: String\n}\n\ntype __EnumValue {\n name: String!\n description: String\n isDeprecated: Boolean!\n deprecationReason: String\n}\n\nenum __TypeKind {\n SCALAR\n OBJECT\n INTERFACE\n UNION\n ENUM\n INPUT_OBJECT\n LIST\n NON_NULL\n}\n\ntype __Directive {\n name: String!\n description: String\n locations: [__DirectiveLocation!]!\n args: [__InputValue!]!\n}\n\nenum __DirectiveLocation {\n QUERY\n MUTATION\n SUBSCRIPTION\n FIELD\n FRAGMENT_DEFINITION\n FRAGMENT_SPREAD\n INLINE_FRAGMENT\n SCHEMA\n SCALAR\n OBJECT\n FIELD_DEFINITION\n ARGUMENT_DEFINITION\n INTERFACE\n UNION\n ENUM\n ENUM_VALUE\n INPUT_OBJECT\n INPUT_FIELD_DEFINITION\n}\n",
- BuiltIn: true,
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/prelude.graphql b/vendor/github.com/vektah/gqlparser/validator/prelude.graphql
deleted file mode 100644
index 2c7f7c02..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/prelude.graphql
+++ /dev/null
@@ -1,119 +0,0 @@
-# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema
-
-# The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
-scalar Int
-
-# The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
-scalar Float
-
-# The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
-scalar String
-
-# The `Boolean` scalar type represents `true` or `false`.
-scalar Boolean
-
-# The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as "4") or integer (such as 4) input value will be accepted as an ID.
-scalar ID
-
-# The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument.
-directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
-
-# The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument.
-directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
-
-# The @deprecated directive is used within the type system definition language to indicate deprecated portions of a GraphQL service’s schema, such as deprecated fields on a type or deprecated enum values.
-directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ENUM_VALUE
-
-type __Schema {
- types: [__Type!]!
- queryType: __Type!
- mutationType: __Type
- subscriptionType: __Type
- directives: [__Directive!]!
-}
-
-type __Type {
- kind: __TypeKind!
- name: String
- description: String
-
- # OBJECT and INTERFACE only
- fields(includeDeprecated: Boolean = false): [__Field!]
-
- # OBJECT only
- interfaces: [__Type!]
-
- # INTERFACE and UNION only
- possibleTypes: [__Type!]
-
- # ENUM only
- enumValues(includeDeprecated: Boolean = false): [__EnumValue!]
-
- # INPUT_OBJECT only
- inputFields: [__InputValue!]
-
- # NON_NULL and LIST only
- ofType: __Type
-}
-
-type __Field {
- name: String!
- description: String
- args: [__InputValue!]!
- type: __Type!
- isDeprecated: Boolean!
- deprecationReason: String
-}
-
-type __InputValue {
- name: String!
- description: String
- type: __Type!
- defaultValue: String
-}
-
-type __EnumValue {
- name: String!
- description: String
- isDeprecated: Boolean!
- deprecationReason: String
-}
-
-enum __TypeKind {
- SCALAR
- OBJECT
- INTERFACE
- UNION
- ENUM
- INPUT_OBJECT
- LIST
- NON_NULL
-}
-
-type __Directive {
- name: String!
- description: String
- locations: [__DirectiveLocation!]!
- args: [__InputValue!]!
-}
-
-enum __DirectiveLocation {
- QUERY
- MUTATION
- SUBSCRIPTION
- FIELD
- FRAGMENT_DEFINITION
- FRAGMENT_SPREAD
- INLINE_FRAGMENT
- SCHEMA
- SCALAR
- OBJECT
- FIELD_DEFINITION
- ARGUMENT_DEFINITION
- INTERFACE
- UNION
- ENUM
- ENUM_VALUE
- INPUT_OBJECT
- INPUT_FIELD_DEFINITION
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
deleted file mode 100644
index 69148d52..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
+++ /dev/null
@@ -1,86 +0,0 @@
-package validator
-
-import (
- "fmt"
- "sort"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("FieldsOnCorrectType", func(observers *Events, addError AddErrFunc) {
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if field.ObjectDefinition == nil || field.Definition != nil {
- return
- }
-
- message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name)
-
- if suggestedTypeNames := getSuggestedTypeNames(walker, field.ObjectDefinition, field.Name); suggestedTypeNames != nil {
- message += " Did you mean to use an inline fragment on " + QuotedOrList(suggestedTypeNames...) + "?"
- } else if suggestedFieldNames := getSuggestedFieldNames(field.ObjectDefinition, field.Name); suggestedFieldNames != nil {
- message += " Did you mean " + QuotedOrList(suggestedFieldNames...) + "?"
- }
-
- addError(
- Message(message),
- At(field.Position),
- )
- })
- })
-}
-
-// Go through all of the implementations of type, as well as the interfaces
-// that they implement. If any of those types include the provided field,
-// suggest them, sorted by how often the type is referenced, starting
-// with Interfaces.
-func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) []string {
- if !parent.IsAbstractType() {
- return nil
- }
-
- var suggestedObjectTypes []string
- var suggestedInterfaceTypes []string
- interfaceUsageCount := map[string]int{}
-
- for _, possibleType := range walker.Schema.GetPossibleTypes(parent) {
- field := possibleType.Fields.ForName(name)
- if field == nil {
- continue
- }
-
- suggestedObjectTypes = append(suggestedObjectTypes, possibleType.Name)
-
- for _, possibleInterface := range possibleType.Interfaces {
- interfaceField := walker.Schema.Types[possibleInterface]
- if interfaceField != nil && interfaceField.Fields.ForName(name) != nil {
- if interfaceUsageCount[possibleInterface] == 0 {
- suggestedInterfaceTypes = append(suggestedInterfaceTypes, possibleInterface)
- }
- interfaceUsageCount[possibleInterface]++
- }
- }
- }
-
- sort.SliceStable(suggestedInterfaceTypes, func(i, j int) bool {
- return interfaceUsageCount[suggestedInterfaceTypes[i]] > interfaceUsageCount[suggestedInterfaceTypes[j]]
- })
-
- return append(suggestedInterfaceTypes, suggestedObjectTypes...)
-}
-
-// For the field name provided, determine if there are any similar field names
-// that may be the result of a typo.
-func getSuggestedFieldNames(parent *ast.Definition, name string) []string {
- if parent.Kind != ast.Object && parent.Kind != ast.Interface {
- return nil
- }
-
- var possibleFieldNames []string
- for _, field := range parent.Fields {
- possibleFieldNames = append(possibleFieldNames, field.Name)
- }
-
- return SuggestionList(name, possibleFieldNames)
-}
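
Building on the LoadQuery sketch shown after the readme above, a hypothetical query with a misspelled field illustrates the suggestion path of this rule; the printed message is an expectation derived from its format strings, not captured output:

// Assumes the imports and helpers from the earlier sketch.
func demoFieldSuggestion() {
	schema := gqlparser.MustLoadSchema(&ast.Source{
		Name:  "schema.graphql",
		Input: "type Query { hello: String }",
	})
	_, errs := gqlparser.LoadQuery(schema, "{ helo }")
	fmt.Println(errs[0].Message)
	// Expected, roughly:
	//   Cannot query field "helo" on type "Query". Did you mean "hello"?
}
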
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
deleted file mode 100644
index a4a48246..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package validator
-
-import (
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("FragmentsOnCompositeTypes", func(observers *Events, addError AddErrFunc) {
- observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
- fragmentType := walker.Schema.Types[inlineFragment.TypeCondition]
- if fragmentType == nil || fragmentType.IsCompositeType() {
- return
- }
-
- message := fmt.Sprintf(`Fragment cannot condition on non composite type "%s".`, inlineFragment.TypeCondition)
-
- addError(
- Message(message),
- At(inlineFragment.Position),
- )
- })
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() {
- return
- }
-
- message := fmt.Sprintf(`Fragment "%s" cannot condition on non composite type "%s".`, fragment.Name, fragment.TypeCondition)
-
- addError(
- Message(message),
- At(fragment.Position),
- )
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
deleted file mode 100644
index 1a46431d..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
+++ /dev/null
@@ -1,57 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) {
- // A GraphQL field is only valid if all supplied arguments are defined by that field.
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if field.Definition == nil || field.ObjectDefinition == nil {
- return
- }
- for _, arg := range field.Arguments {
- def := field.Definition.Arguments.ForName(arg.Name)
- if def != nil {
- continue
- }
-
- var suggestions []string
- for _, argDef := range field.Definition.Arguments {
- suggestions = append(suggestions, argDef.Name)
- }
-
- addError(
- Message(`Unknown argument "%s" on field "%s" of type "%s".`, arg.Name, field.Name, field.ObjectDefinition.Name),
- SuggestListQuoted("Did you mean", arg.Name, suggestions),
- At(field.Position),
- )
- }
- })
-
- observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
- if directive.Definition == nil {
- return
- }
- for _, arg := range directive.Arguments {
- def := directive.Definition.Arguments.ForName(arg.Name)
- if def != nil {
- continue
- }
-
- var suggestions []string
- for _, argDef := range directive.Definition.Arguments {
- suggestions = append(suggestions, argDef.Name)
- }
-
- addError(
- Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name),
- SuggestListQuoted("Did you mean", arg.Name, suggestions),
- At(directive.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
deleted file mode 100644
index dc4353ef..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("KnownDirectives", func(observers *Events, addError AddErrFunc) {
- observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
- if directive.Definition == nil {
- addError(
- Message(`Unknown directive "%s".`, directive.Name),
- At(directive.Position),
- )
- return
- }
-
- for _, loc := range directive.Definition.Locations {
- if loc == directive.Location {
- return
- }
- }
-
- addError(
- Message(`Directive "%s" may not be used on %s.`, directive.Name, directive.Location),
- At(directive.Position),
- )
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
deleted file mode 100644
index ec91588c..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) {
- observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
- if fragmentSpread.Definition == nil {
- addError(
- Message(`Unknown fragment "%s".`, fragmentSpread.Name),
- At(fragmentSpread.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
deleted file mode 100644
index 223086b3..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("KnownTypeNames", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- for _, vdef := range operation.VariableDefinitions {
- typeName := vdef.Type.Name()
- def := walker.Schema.Types[typeName]
- if def != nil {
- continue
- }
-
- addError(
- Message(`Unknown type "%s".`, typeName),
- At(operation.Position),
- )
- }
- })
-
- observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
- typedName := inlineFragment.TypeCondition
- if typedName == "" {
- return
- }
-
- def := walker.Schema.Types[typedName]
- if def != nil {
- return
- }
-
- addError(
- Message(`Unknown type "%s".`, typedName),
- At(inlineFragment.Position),
- )
- })
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- typeName := fragment.TypeCondition
- def := walker.Schema.Types[typeName]
- if def != nil {
- return
- }
-
- var possibleTypes []string
- for _, t := range walker.Schema.Types {
- possibleTypes = append(possibleTypes, t.Name)
- }
-
- addError(
- Message(`Unknown type "%s".`, typeName),
- SuggestListQuoted("Did you mean", typeName, possibleTypes),
- At(fragment.Position),
- )
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go b/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
deleted file mode 100644
index dd232142..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("LoneAnonymousOperation", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- if operation.Name == "" && len(walker.Document.Operations) > 1 {
- addError(
- Message(`This anonymous operation must be the only defined operation.`),
- At(operation.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
deleted file mode 100644
index 7511529b..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package validator
-
-import (
- "fmt"
- "strings"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("NoFragmentCycles", func(observers *Events, addError AddErrFunc) {
- visitedFrags := make(map[string]bool)
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- var spreadPath []*ast.FragmentSpread
- spreadPathIndexByName := make(map[string]int)
-
- var recursive func(fragment *ast.FragmentDefinition)
- recursive = func(fragment *ast.FragmentDefinition) {
- if visitedFrags[fragment.Name] {
- return
- }
-
- visitedFrags[fragment.Name] = true
-
- spreadNodes := getFragmentSpreads(fragment.SelectionSet)
- if len(spreadNodes) == 0 {
- return
- }
- spreadPathIndexByName[fragment.Name] = len(spreadPath)
-
- for _, spreadNode := range spreadNodes {
- spreadName := spreadNode.Name
-
- cycleIndex, ok := spreadPathIndexByName[spreadName]
-
- spreadPath = append(spreadPath, spreadNode)
- if !ok {
- spreadFragment := walker.Document.Fragments.ForName(spreadName)
- if spreadFragment != nil {
- recursive(spreadFragment)
- }
- } else {
- cyclePath := spreadPath[cycleIndex : len(spreadPath)-1]
- var fragmentNames []string
- for _, fs := range cyclePath {
- fragmentNames = append(fragmentNames, fs.Name)
- }
- var via string
- if len(fragmentNames) != 0 {
- via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", "))
- }
- addError(
- Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via),
- At(spreadNode.Position),
- )
- }
-
- spreadPath = spreadPath[:len(spreadPath)-1]
- }
-
- delete(spreadPathIndexByName, fragment.Name)
- }
-
- recursive(fragment)
- })
- })
-}
-
-func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread {
- var spreads []*ast.FragmentSpread
-
- setsToVisit := []ast.SelectionSet{node}
-
- for len(setsToVisit) != 0 {
- set := setsToVisit[len(setsToVisit)-1]
- setsToVisit = setsToVisit[:len(setsToVisit)-1]
-
- for _, selection := range set {
- switch selection := selection.(type) {
- case *ast.FragmentSpread:
- spreads = append(spreads, selection)
- case *ast.Field:
- setsToVisit = append(setsToVisit, selection.SelectionSet)
- case *ast.InlineFragment:
- setsToVisit = append(setsToVisit, selection.SelectionSet)
- }
- }
- }
-
- return spreads
-}
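
A hypothetical pair of mutually recursive fragments shows what this rule rejects, again reusing the helpers assumed in the sketch after the readme; the message is an expectation read off the format string above:

func demoFragmentCycle() {
	schema := gqlparser.MustLoadSchema(&ast.Source{
		Name:  "schema.graphql",
		Input: "type Query { hello: String }",
	})
	_, errs := gqlparser.LoadQuery(schema, `
		query { ...A }
		fragment A on Query { ...B }
		fragment B on Query { ...A }
	`)
	fmt.Println(errs)
	// Expected to include something like:
	//   Cannot spread fragment "A" within itself via B.
}
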
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
deleted file mode 100644
index 505206be..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("NoUndefinedVariables", func(observers *Events, addError AddErrFunc) {
- observers.OnValue(func(walker *Walker, value *ast.Value) {
- if walker.CurrentOperation == nil || value.Kind != ast.Variable || value.VariableDefinition != nil {
- return
- }
-
- if walker.CurrentOperation.Name != "" {
- addError(
- Message(`Variable "%s" is not defined by operation "%s".`, value, walker.CurrentOperation.Name),
- At(walker.CurrentOperation.Position),
- )
- } else {
- addError(
- Message(`Variable "%s" is not defined.`, value),
- At(value.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
deleted file mode 100644
index 4aa835f5..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) {
-
- inFragmentDefinition := false
- fragmentNameUsed := make(map[string]bool)
-
- observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
- if !inFragmentDefinition {
- fragmentNameUsed[fragmentSpread.Name] = true
- }
- })
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- inFragmentDefinition = true
- if !fragmentNameUsed[fragment.Name] {
- addError(
- Message(`Fragment "%s" is never used.`, fragment.Name),
- At(fragment.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
deleted file mode 100644
index 28cf7736..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- for _, varDef := range operation.VariableDefinitions {
- if varDef.Used {
- continue
- }
-
- if operation.Name != "" {
- addError(
- Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name),
- At(varDef.Position),
- )
- } else {
- addError(
- Message(`Variable "$%s" is never used.`, varDef.Variable),
- At(varDef.Position),
- )
- }
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go b/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
deleted file mode 100644
index bb2f1831..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
+++ /dev/null
@@ -1,557 +0,0 @@
-package validator
-
-import (
- "bytes"
- "fmt"
- "reflect"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
-
- AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) {
- /**
- * Algorithm:
- *
- * Conflicts occur when two fields exist in a query which will produce the same
- * response name, but represent differing values, thus creating a conflict.
- * The algorithm below finds all conflicts via making a series of comparisons
- * between fields. In order to compare as few fields as possible, this makes
- * a series of comparisons "within" sets of fields and "between" sets of fields.
- *
-         * Given any selection set, collecting its fields (flattening inline
-         * fragments into the set) produces both a set of fields and a list of
-         * fragments referenced by fragment spreads.
- *
- * A) Each selection set represented in the document first compares "within" its
- * collected set of fields, finding any conflicts between every pair of
- * overlapping fields.
-         * Note: This is the *only time* that the fields "within" a set are compared
- * to each other. After this only fields "between" sets are compared.
- *
- * B) Also, if any fragment is referenced in a selection set, then a
- * comparison is made "between" the original set of fields and the
- * referenced fragment.
- *
- * C) Also, if multiple fragments are referenced, then comparisons
- * are made "between" each referenced fragment.
- *
- * D) When comparing "between" a set of fields and a referenced fragment, first
- * a comparison is made between each field in the original set of fields and
-         * each field in the referenced set of fields.
- *
- * E) Also, if any fragment is referenced in the referenced selection set,
- * then a comparison is made "between" the original set of fields and the
- * referenced fragment (recursively referring to step D).
- *
- * F) When comparing "between" two fragments, first a comparison is made between
-         * each field in the first referenced set of fields and each field in the
- * second referenced set of fields.
- *
- * G) Also, any fragments referenced by the first must be compared to the
- * second, and any fragments referenced by the second must be compared to the
- * first (recursively referring to step F).
- *
- * H) When comparing two fields, if both have selection sets, then a comparison
- * is made "between" both selection sets, first comparing the set of fields in
- * the first selection set with the set of fields in the second.
- *
- * I) Also, if any fragment is referenced in either selection set, then a
- * comparison is made "between" the other set of fields and the
- * referenced fragment.
- *
- * J) Also, if two fragments are referenced in both selection sets, then a
- * comparison is made "between" the two fragments.
- *
- */
-
- m := &overlappingFieldsCanBeMergedManager{
- comparedFragmentPairs: pairSet{data: make(map[string]map[string]bool)},
- }
-
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- m.walker = walker
- conflicts := m.findConflictsWithinSelectionSet(operation.SelectionSet)
- for _, conflict := range conflicts {
- conflict.addFieldsConflictMessage(addError)
- }
- })
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if walker.CurrentOperation == nil {
- // When checking both Operation and Fragment, errors are duplicated when processing FragmentDefinition referenced from Operation
- return
- }
- m.walker = walker
- conflicts := m.findConflictsWithinSelectionSet(field.SelectionSet)
- for _, conflict := range conflicts {
- conflict.addFieldsConflictMessage(addError)
- }
- })
- observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
- m.walker = walker
- conflicts := m.findConflictsWithinSelectionSet(inlineFragment.SelectionSet)
- for _, conflict := range conflicts {
- conflict.addFieldsConflictMessage(addError)
- }
- })
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- m.walker = walker
- conflicts := m.findConflictsWithinSelectionSet(fragment.SelectionSet)
- for _, conflict := range conflicts {
- conflict.addFieldsConflictMessage(addError)
- }
- })
- })
-}
-
-type pairSet struct {
- data map[string]map[string]bool
-}
-
-func (pairSet *pairSet) Add(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) {
- add := func(a *ast.FragmentSpread, b *ast.FragmentSpread) {
- m := pairSet.data[a.Name]
- if m == nil {
- m = make(map[string]bool)
- pairSet.data[a.Name] = m
- }
- m[b.Name] = areMutuallyExclusive
- }
- add(a, b)
- add(b, a)
-}
-
-func (pairSet *pairSet) Has(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) bool {
- am, ok := pairSet.data[a.Name]
- if !ok {
- return false
- }
- result, ok := am[b.Name]
- if !ok {
- return false
- }
-
- // areMutuallyExclusive being false is a superset of being true,
- // hence if we want to know if this PairSet "has" these two with no
- // exclusivity, we have to ensure it was added as such.
- if !areMutuallyExclusive {
- return !result
- }
-
- return true
-}
-
-type sequentialFieldsMap struct {
-	// We can't use map[string][]*ast.Field, because map iteration order is not stable.
- seq []string
- data map[string][]*ast.Field
-}
-
-type fieldIterateEntry struct {
- ResponseName string
- Fields []*ast.Field
-}
-
-func (m *sequentialFieldsMap) Push(responseName string, field *ast.Field) {
- fields, ok := m.data[responseName]
- if !ok {
- m.seq = append(m.seq, responseName)
- }
- fields = append(fields, field)
- m.data[responseName] = fields
-}
-
-func (m *sequentialFieldsMap) Get(responseName string) ([]*ast.Field, bool) {
- fields, ok := m.data[responseName]
- return fields, ok
-}
-
-func (m *sequentialFieldsMap) Iterator() [][]*ast.Field {
- fieldsList := make([][]*ast.Field, 0, len(m.seq))
- for _, responseName := range m.seq {
- fields := m.data[responseName]
- fieldsList = append(fieldsList, fields)
- }
- return fieldsList
-}
-
-func (m *sequentialFieldsMap) KeyValueIterator() []*fieldIterateEntry {
- fieldEntriesList := make([]*fieldIterateEntry, 0, len(m.seq))
- for _, responseName := range m.seq {
- fields := m.data[responseName]
- fieldEntriesList = append(fieldEntriesList, &fieldIterateEntry{
- ResponseName: responseName,
- Fields: fields,
- })
- }
- return fieldEntriesList
-}
-
-type conflictMessageContainer struct {
- Conflicts []*ConflictMessage
-}
-
-type ConflictMessage struct {
- Message string
- ResponseName string
- Names []string
- SubMessage []*ConflictMessage
- Position *ast.Position
-}
-
-func (m *ConflictMessage) String(buf *bytes.Buffer) {
- if len(m.SubMessage) == 0 {
- buf.WriteString(m.Message)
- return
- }
-
- for idx, subMessage := range m.SubMessage {
- buf.WriteString(`subfields "`)
- buf.WriteString(subMessage.ResponseName)
- buf.WriteString(`" conflict because `)
- subMessage.String(buf)
- if idx != len(m.SubMessage)-1 {
- buf.WriteString(" and ")
- }
- }
-}
-
-func (m *ConflictMessage) addFieldsConflictMessage(addError AddErrFunc) {
- var buf bytes.Buffer
- m.String(&buf)
- addError(
- Message(`Fields "%s" conflict because %s. Use different aliases on the fields to fetch both if this was intentional.`, m.ResponseName, buf.String()),
- At(m.Position),
- )
-}
-
-type overlappingFieldsCanBeMergedManager struct {
- walker *Walker
-
- // per walker
- comparedFragmentPairs pairSet
- // cachedFieldsAndFragmentNames interface{}
-
- // per selectionSet
- comparedFragments map[string]bool
-}
-
-func (m *overlappingFieldsCanBeMergedManager) findConflictsWithinSelectionSet(selectionSet ast.SelectionSet) []*ConflictMessage {
- if len(selectionSet) == 0 {
- return nil
- }
-
- fieldsMap, fragmentSpreads := getFieldsAndFragmentNames(selectionSet)
-
- var conflicts conflictMessageContainer
-
-	// (A) Find all conflicts "within" the fieldMap of this selection set.
- // Note: this is the *only place* `collectConflictsWithin` is called.
- m.collectConflictsWithin(&conflicts, fieldsMap)
-
- m.comparedFragments = make(map[string]bool)
- for idx, fragmentSpreadA := range fragmentSpreads {
- // (B) Then collect conflicts between these fieldMap and those represented by
- // each spread fragment name found.
- m.collectConflictsBetweenFieldsAndFragment(&conflicts, false, fieldsMap, fragmentSpreadA)
-
- for _, fragmentSpreadB := range fragmentSpreads[idx+1:] {
- // (C) Then compare this fragment with all other fragments found in this
- // selection set to collect conflicts between fragments spread together.
- // This compares each item in the list of fragment names to every other
- // item in that same list (except for itself).
- m.collectConflictsBetweenFragments(&conflicts, false, fragmentSpreadA, fragmentSpreadB)
- }
- }
-
- return conflicts.Conflicts
-}
-
-func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFragment(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fieldsMap *sequentialFieldsMap, fragmentSpread *ast.FragmentSpread) {
- if m.comparedFragments[fragmentSpread.Name] {
- return
- }
- m.comparedFragments[fragmentSpread.Name] = true
-
- if fragmentSpread.Definition == nil {
- return
- }
-
- fieldsMapB, fragmentSpreads := getFieldsAndFragmentNames(fragmentSpread.Definition.SelectionSet)
-
- // Do not compare a fragment's fieldMap to itself.
- if reflect.DeepEqual(fieldsMap, fieldsMapB) {
- return
- }
-
- // (D) First collect any conflicts between the provided collection of fields
- // and the collection of fields represented by the given fragment.
- m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMap, fieldsMapB)
-
- // (E) Then collect any conflicts between the provided collection of fields
- // and any fragment names found in the given fragment.
- baseFragmentSpread := fragmentSpread
- for _, fragmentSpread := range fragmentSpreads {
- if fragmentSpread.Name == baseFragmentSpread.Name {
- continue
- }
- m.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsMap, fragmentSpread)
- }
-}
-
-func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
-
- var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread)
- check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
-
- if fragmentSpreadA.Name == fragmentSpreadB.Name {
- return
- }
-
- if m.comparedFragmentPairs.Has(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive) {
- return
- }
- m.comparedFragmentPairs.Add(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive)
-
- if fragmentSpreadA.Definition == nil {
- return
- }
- if fragmentSpreadB.Definition == nil {
- return
- }
-
- fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(fragmentSpreadA.Definition.SelectionSet)
- fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(fragmentSpreadB.Definition.SelectionSet)
-
- // (F) First, collect all conflicts between these two collections of fields
- // (not including any nested fragments).
- m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
-
- // (G) Then collect conflicts between the first fragment and any nested
- // fragments spread in the second fragment.
- for _, fragmentSpread := range fragmentSpreadsB {
- check(fragmentSpreadA, fragmentSpread)
- }
- // (G) Then collect conflicts between the second fragment and any nested
- // fragments spread in the first fragment.
- for _, fragmentSpread := range fragmentSpreadsA {
- check(fragmentSpread, fragmentSpreadB)
- }
- }
-
- check(fragmentSpreadA, fragmentSpreadB)
-}
-
-func (m *overlappingFieldsCanBeMergedManager) findConflictsBetweenSubSelectionSets(areMutuallyExclusive bool, selectionSetA ast.SelectionSet, selectionSetB ast.SelectionSet) *conflictMessageContainer {
- var conflicts conflictMessageContainer
-
- fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(selectionSetA)
- fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(selectionSetB)
-
-	// (H) First, collect all conflicts between these two collections of fields.
- m.collectConflictsBetween(&conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
-
- // (I) Then collect conflicts between the first collection of fields and
- // those referenced by each fragment name associated with the second.
- for _, fragmentSpread := range fragmentSpreadsB {
- m.comparedFragments = make(map[string]bool)
- m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapA, fragmentSpread)
- }
-
- // (I) Then collect conflicts between the second collection of fields and
- // those referenced by each fragment name associated with the first.
- for _, fragmentSpread := range fragmentSpreadsA {
- m.comparedFragments = make(map[string]bool)
- m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapB, fragmentSpread)
- }
-
- // (J) Also collect conflicts between any fragment names by the first and
- // fragment names by the second. This compares each item in the first set of
- // names to each item in the second set of names.
- for _, fragmentSpreadA := range fragmentSpreadsA {
- for _, fragmentSpreadB := range fragmentSpreadsB {
- m.collectConflictsBetweenFragments(&conflicts, areMutuallyExclusive, fragmentSpreadA, fragmentSpreadB)
- }
- }
-
- if len(conflicts.Conflicts) == 0 {
- return nil
- }
-
- return &conflicts
-}
-
-func (m *overlappingFieldsCanBeMergedManager) collectConflictsWithin(conflicts *conflictMessageContainer, fieldsMap *sequentialFieldsMap) {
- for _, fields := range fieldsMap.Iterator() {
- for idx, fieldA := range fields {
- for _, fieldB := range fields[idx+1:] {
- conflict := m.findConflict(false, fieldA, fieldB)
- if conflict != nil {
- conflicts.Conflicts = append(conflicts.Conflicts, conflict)
- }
- }
- }
- }
-}
-
-func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetween(conflicts *conflictMessageContainer, parentFieldsAreMutuallyExclusive bool, fieldsMapA *sequentialFieldsMap, fieldsMapB *sequentialFieldsMap) {
- for _, fieldsEntryA := range fieldsMapA.KeyValueIterator() {
- fieldsB, ok := fieldsMapB.Get(fieldsEntryA.ResponseName)
- if !ok {
- continue
- }
- for _, fieldA := range fieldsEntryA.Fields {
- for _, fieldB := range fieldsB {
- conflict := m.findConflict(parentFieldsAreMutuallyExclusive, fieldA, fieldB)
- if conflict != nil {
- conflicts.Conflicts = append(conflicts.Conflicts, conflict)
- }
- }
- }
- }
-}
-
-func (m *overlappingFieldsCanBeMergedManager) findConflict(parentFieldsAreMutuallyExclusive bool, fieldA *ast.Field, fieldB *ast.Field) *ConflictMessage {
- if fieldA.Definition == nil || fieldA.ObjectDefinition == nil || fieldB.Definition == nil || fieldB.ObjectDefinition == nil {
- return nil
- }
-
- areMutuallyExclusive := parentFieldsAreMutuallyExclusive
- if !areMutuallyExclusive {
- tmp := fieldA.ObjectDefinition.Name != fieldB.ObjectDefinition.Name
- tmp = tmp && fieldA.ObjectDefinition.Kind == ast.Object
- tmp = tmp && fieldB.ObjectDefinition.Kind == ast.Object
- areMutuallyExclusive = tmp
- }
-
- fieldNameA := fieldA.Name
- if fieldA.Alias != "" {
- fieldNameA = fieldA.Alias
- }
-
- if !areMutuallyExclusive {
- // Two aliases must refer to the same field.
- if fieldA.Name != fieldB.Name {
- return &ConflictMessage{
- ResponseName: fieldNameA,
- Message: fmt.Sprintf(`%s and %s are different fields`, fieldA.Name, fieldB.Name),
- Position: fieldB.Position,
- }
- }
-
- // Two field calls must have the same arguments.
- if !sameArguments(fieldA.Arguments, fieldB.Arguments) {
- return &ConflictMessage{
- ResponseName: fieldNameA,
- Message: "they have differing arguments",
- Position: fieldB.Position,
- }
- }
- }
-
- if doTypesConflict(m.walker, fieldA.Definition.Type, fieldB.Definition.Type) {
- return &ConflictMessage{
- ResponseName: fieldNameA,
- Message: fmt.Sprintf(`they return conflicting types %s and %s`, fieldA.Definition.Type.String(), fieldB.Definition.Type.String()),
- Position: fieldB.Position,
- }
- }
-
- // Collect and compare sub-fields. Use the same "visited fragment names" list
- // for both collections so fields in a fragment reference are never
- // compared to themselves.
- conflicts := m.findConflictsBetweenSubSelectionSets(areMutuallyExclusive, fieldA.SelectionSet, fieldB.SelectionSet)
- if conflicts == nil {
- return nil
- }
- return &ConflictMessage{
- ResponseName: fieldNameA,
- SubMessage: conflicts.Conflicts,
- Position: fieldB.Position,
- }
-}
-
-func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool {
- if len(args1) != len(args2) {
- return false
- }
- for _, arg1 := range args1 {
- for _, arg2 := range args2 {
- if arg1.Name != arg2.Name {
- return false
- }
- if !sameValue(arg1.Value, arg2.Value) {
- return false
- }
- }
- }
- return true
-}
-
-func sameValue(value1 *ast.Value, value2 *ast.Value) bool {
- if value1.Kind != value2.Kind {
- return false
- }
- if value1.Raw != value2.Raw {
- return false
- }
- return true
-}
-
-func doTypesConflict(walker *Walker, type1 *ast.Type, type2 *ast.Type) bool {
- if type1.Elem != nil {
- if type2.Elem != nil {
- return doTypesConflict(walker, type1.Elem, type2.Elem)
- }
- return true
- }
- if type2.Elem != nil {
- return true
- }
- if type1.NonNull && !type2.NonNull {
- return true
- }
- if !type1.NonNull && type2.NonNull {
- return true
- }
-
- t1 := walker.Schema.Types[type1.NamedType]
- t2 := walker.Schema.Types[type2.NamedType]
- if (t1.Kind == ast.Scalar || t1.Kind == ast.Enum) && (t2.Kind == ast.Scalar || t2.Kind == ast.Enum) {
- return t1.Name != t2.Name
- }
-
- return false
-}
-
-func getFieldsAndFragmentNames(selectionSet ast.SelectionSet) (*sequentialFieldsMap, []*ast.FragmentSpread) {
- fieldsMap := sequentialFieldsMap{
- data: make(map[string][]*ast.Field),
- }
- var fragmentSpreads []*ast.FragmentSpread
-
- var walk func(selectionSet ast.SelectionSet)
- walk = func(selectionSet ast.SelectionSet) {
- for _, selection := range selectionSet {
- switch selection := selection.(type) {
- case *ast.Field:
- responseName := selection.Name
- if selection.Alias != "" {
- responseName = selection.Alias
- }
- fieldsMap.Push(responseName, selection)
-
- case *ast.InlineFragment:
- walk(selection.SelectionSet)
-
- case *ast.FragmentSpread:
- fragmentSpreads = append(fragmentSpreads, selection)
- }
- }
- }
- walk(selectionSet)
-
- return &fieldsMap, fragmentSpreads
-}
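
A worked example of the simplest conflict the algorithm above detects, assuming the same helpers as the sketch after the readme; the quoted message follows addFieldsConflictMessage and is illustrative rather than captured:

func demoFieldConflict() {
	schema := gqlparser.MustLoadSchema(&ast.Source{
		Name:  "schema.graphql",
		Input: "type Query { hello: String  world: String }",
	})
	// Both selections share the response name "greeting" but target different fields.
	_, errs := gqlparser.LoadQuery(schema, "{ greeting: hello  greeting: world }")
	fmt.Println(errs)
	// Expected, roughly:
	//   Fields "greeting" conflict because hello and world are different fields.
	//   Use different aliases on the fields to fetch both if this was intentional.
}
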
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go b/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
deleted file mode 100644
index 04611834..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) {
-
- validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) {
- if parentDef == nil {
- return
- }
-
- var parentDefs []*ast.Definition
- switch parentDef.Kind {
- case ast.Object:
- parentDefs = []*ast.Definition{parentDef}
- case ast.Interface, ast.Union:
- parentDefs = walker.Schema.GetPossibleTypes(parentDef)
- default:
- return
- }
-
- fragmentDefType := walker.Schema.Types[fragmentName]
- if fragmentDefType == nil {
- return
- }
- if !fragmentDefType.IsCompositeType() {
- // checked by FragmentsOnCompositeTypes
- return
- }
- fragmentDefs := walker.Schema.GetPossibleTypes(fragmentDefType)
-
- for _, fragmentDef := range fragmentDefs {
- for _, parentDef := range parentDefs {
- if parentDef.Name == fragmentDef.Name {
- return
- }
- }
- }
-
- emitError()
- }
-
- observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
- validate(walker, inlineFragment.ObjectDefinition, inlineFragment.TypeCondition, func() {
- addError(
- Message(`Fragment cannot be spread here as objects of type "%s" can never be of type "%s".`, inlineFragment.ObjectDefinition.Name, inlineFragment.TypeCondition),
- At(inlineFragment.Position),
- )
- })
- })
-
- observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
- if fragmentSpread.Definition == nil {
- return
- }
- validate(walker, fragmentSpread.ObjectDefinition, fragmentSpread.Definition.TypeCondition, func() {
- addError(
- Message(`Fragment "%s" cannot be spread here as objects of type "%s" can never be of type "%s".`, fragmentSpread.Name, fragmentSpread.ObjectDefinition.Name, fragmentSpread.Definition.TypeCondition),
- At(fragmentSpread.Position),
- )
- })
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go b/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
deleted file mode 100644
index 55791a6b..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) {
-
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if field.Definition == nil {
- return
- }
-
- argDef:
- for _, argDef := range field.Definition.Arguments {
- if !argDef.Type.NonNull {
- continue
- }
- if argDef.DefaultValue != nil {
- continue
- }
- for _, arg := range field.Arguments {
- if arg.Name == argDef.Name {
- continue argDef
- }
- }
-
- addError(
- Message(`Field "%s" argument "%s" of type "%s" is required but not provided.`, field.Name, argDef.Name, argDef.Type.String()),
- At(field.Position),
- )
- }
- })
-
- observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
- if directive.Definition == nil {
- return
- }
-
- argDef:
- for _, argDef := range directive.Definition.Arguments {
- if !argDef.Type.NonNull {
- continue
- }
- if argDef.DefaultValue != nil {
- continue
- }
- for _, arg := range directive.Arguments {
- if arg.Name == argDef.Name {
- continue argDef
- }
- }
-
- addError(
- Message(`Directive "@%s" argument "%s" of type "%s" is required but not provided.`, directive.Definition.Name, argDef.Name, argDef.Type.String()),
- At(directive.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go b/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
deleted file mode 100644
index bb961f44..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("ScalarLeafs", func(observers *Events, addError AddErrFunc) {
- observers.OnField(func(walker *Walker, field *ast.Field) {
- if field.Definition == nil {
- return
- }
-
- fieldType := walker.Schema.Types[field.Definition.Type.Name()]
- if fieldType == nil {
- return
- }
-
- if fieldType.IsLeafType() && len(field.SelectionSet) > 0 {
- addError(
- Message(`Field "%s" must not have a selection since type "%s" has no subfields.`, field.Name, fieldType.Name),
- At(field.Position),
- )
- }
-
- if !fieldType.IsLeafType() && len(field.SelectionSet) == 0 {
- addError(
- Message(`Field "%s" of type "%s" must have a selection of subfields.`, field.Name, field.Definition.Type.String()),
- Suggestf(`"%s { ... }"`, field.Name),
- At(field.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go b/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
deleted file mode 100644
index 53003c11..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package validator
-
-import (
- "strconv"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("SingleFieldSubscriptions", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- if operation.Operation != ast.Subscription {
- return
- }
-
- if len(operation.SelectionSet) > 1 {
- name := "Anonymous Subscription"
- if operation.Name != "" {
- name = `Subscription ` + strconv.Quote(operation.Name)
- }
-
- addError(
- Message(`%s must select only one top level field.`, name),
- At(operation.SelectionSet[1].GetPosition()),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
deleted file mode 100644
index 0ddcde72..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) {
- observers.OnField(func(walker *Walker, field *ast.Field) {
- checkUniqueArgs(field.Arguments, addError)
- })
-
- observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
- checkUniqueArgs(directive.Arguments, addError)
- })
- })
-}
-
-func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) {
- knownArgNames := map[string]bool{}
-
- for _, arg := range args {
- if knownArgNames[arg.Name] {
- addError(
- Message(`There can be only one argument named "%s".`, arg.Name),
- At(arg.Position),
- )
- }
-
- knownArgNames[arg.Name] = true
- }
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
deleted file mode 100644
index 077c4687..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) {
- observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) {
- seen := map[string]bool{}
-
- for _, dir := range directives {
- if seen[dir.Name] {
- addError(
- Message(`The directive "%s" can only be used once at this location.`, dir.Name),
- At(dir.Position),
- )
- }
- seen[dir.Name] = true
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
deleted file mode 100644
index 46a8b7c7..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) {
- seenFragments := map[string]bool{}
-
- observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
- if seenFragments[fragment.Name] {
- addError(
- Message(`There can be only one fragment named "%s".`, fragment.Name),
- At(fragment.Position),
- )
- }
- seenFragments[fragment.Name] = true
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
deleted file mode 100644
index f254d588..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
+++ /dev/null
@@ -1,27 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) {
- observers.OnValue(func(walker *Walker, value *ast.Value) {
- if value.Kind != ast.ObjectValue {
- return
- }
-
- seen := map[string]bool{}
- for _, field := range value.Children {
- if seen[field.Name] {
- addError(
- Message(`There can be only one input field named "%s".`, field.Name),
- At(field.Position),
- )
- }
- seen[field.Name] = true
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
deleted file mode 100644
index c1ab56be..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) {
- seen := map[string]bool{}
-
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- if seen[operation.Name] {
- addError(
- Message(`There can be only one operation named "%s".`, operation.Name),
- At(operation.Position),
- )
- }
- seen[operation.Name] = true
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
deleted file mode 100644
index 70590a88..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- seen := map[string]bool{}
- for _, def := range operation.VariableDefinitions {
- if seen[def.Variable] {
- addError(
- Message(`There can be only one variable named "%s".`, def.Variable),
- At(def.Position),
- )
- }
- seen[def.Variable] = true
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
deleted file mode 100644
index d64cc666..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
+++ /dev/null
@@ -1,130 +0,0 @@
-package validator
-
-import (
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) {
- observers.OnValue(func(walker *Walker, value *ast.Value) {
- if value.Definition == nil || value.ExpectedType == nil {
- return
- }
-
- if value.Definition.Kind == ast.Scalar {
- // Skip validating custom scalars
- if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") {
- return
- }
- }
-
- var possibleEnums []string
- if value.Definition.Kind == ast.Enum {
- for _, val := range value.Definition.EnumValues {
- possibleEnums = append(possibleEnums, val.Name)
- }
- }
-
- rawVal, err := value.Value(nil)
- if err != nil {
- unexpectedTypeMessage(addError, value)
- }
-
- switch value.Kind {
- case ast.NullValue:
- if value.ExpectedType.NonNull {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.ListValue:
- if value.ExpectedType.Elem == nil {
- unexpectedTypeMessage(addError, value)
- return
- }
-
- case ast.IntValue:
- if !value.Definition.OneOf("Int", "Float", "ID") {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.FloatValue:
- if !value.Definition.OneOf("Float") {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.StringValue, ast.BlockValue:
- if value.Definition.Kind == ast.Enum {
- rawValStr := fmt.Sprint(rawVal)
- addError(
- Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
- SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
- At(value.Position),
- )
- } else if !value.Definition.OneOf("String", "ID") {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.EnumValue:
- if value.Definition.Kind != ast.Enum || value.Definition.EnumValues.ForName(value.Raw) == nil {
- rawValStr := fmt.Sprint(rawVal)
- addError(
- Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
- SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
- At(value.Position),
- )
- }
-
- case ast.BooleanValue:
- if !value.Definition.OneOf("Boolean") {
- unexpectedTypeMessage(addError, value)
- }
-
- case ast.ObjectValue:
-
- for _, field := range value.Definition.Fields {
- if field.Type.NonNull {
- fieldValue := value.Children.ForName(field.Name)
- if fieldValue == nil && field.DefaultValue == nil {
- addError(
- Message("Field %s.%s of required type %s was not provided.", value.Definition.Name, field.Name, field.Type.String()),
- At(value.Position),
- )
- continue
- }
- }
- }
-
- for _, fieldValue := range value.Children {
- if value.Definition.Fields.ForName(fieldValue.Name) == nil {
- var suggestions []string
- for _, fieldValue := range value.Definition.Fields {
- suggestions = append(suggestions, fieldValue.Name)
- }
-
- addError(
- Message(`Field "%s" is not defined by type %s.`, fieldValue.Name, value.Definition.Name),
- SuggestListUnquoted("Did you mean", fieldValue.Name, suggestions),
- At(fieldValue.Position),
- )
- }
- }
-
- case ast.Variable:
- return
-
- default:
- panic(fmt.Errorf("unhandled %T", value))
- }
- })
- })
-}
-
-func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) {
- addError(
- Message("Expected type %s, found %s.", v.ExpectedType.String(), v.String()),
- At(v.Position),
- )
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
deleted file mode 100644
index 9d58ae1c..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) {
- observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
- for _, def := range operation.VariableDefinitions {
- if def.Definition == nil {
- continue
- }
- if !def.Definition.IsInputType() {
- addError(
- Message(
- `Variable "$%s" cannot be non-input type "%s".`,
- def.Variable,
- def.Type.String(),
- ),
- At(def.Position),
- )
- }
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
deleted file mode 100644
index e6d97c9f..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package validator
-
-import (
- "github.com/vektah/gqlparser/ast"
- . "github.com/vektah/gqlparser/validator"
-)
-
-func init() {
- AddRule("VariablesInAllowedPosition", func(observers *Events, addError AddErrFunc) {
- observers.OnValue(func(walker *Walker, value *ast.Value) {
- if value.Kind != ast.Variable || value.ExpectedType == nil || value.VariableDefinition == nil || walker.CurrentOperation == nil {
- return
- }
-
- // todo: move me into walk
- // If there is a default, non-nullable types can be null
- if value.VariableDefinition.DefaultValue != nil && value.VariableDefinition.DefaultValue.Kind != ast.NullValue {
- if value.ExpectedType.NonNull {
- value.ExpectedType.NonNull = false
- }
- }
-
- if !value.VariableDefinition.Type.IsCompatible(value.ExpectedType) {
- addError(
- Message(
- `Variable "%s" of type "%s" used in position expecting type "%s".`,
- value,
- value.VariableDefinition.Type.String(),
- value.ExpectedType.String(),
- ),
- At(value.Position),
- )
- }
- })
- })
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/schema.go b/vendor/github.com/vektah/gqlparser/validator/schema.go
deleted file mode 100644
index 57d2022e..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/schema.go
+++ /dev/null
@@ -1,276 +0,0 @@
-//go:generate go run ./inliner/inliner.go
-
-package validator
-
-import (
- "strconv"
- "strings"
-
- . "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
- "github.com/vektah/gqlparser/parser"
-)
-
-func LoadSchema(inputs ...*Source) (*Schema, *gqlerror.Error) {
- ast, err := parser.ParseSchemas(inputs...)
- if err != nil {
- return nil, err
- }
- return ValidateSchemaDocument(ast)
-}
-
-func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, *gqlerror.Error) {
- schema := Schema{
- Types: map[string]*Definition{},
- Directives: map[string]*DirectiveDefinition{},
- PossibleTypes: map[string][]*Definition{},
- Implements: map[string][]*Definition{},
- }
-
- for i, def := range ast.Definitions {
- if schema.Types[def.Name] != nil {
- return nil, gqlerror.ErrorPosf(def.Position, "Cannot redeclare type %s.", def.Name)
- }
- schema.Types[def.Name] = ast.Definitions[i]
- }
-
- for _, ext := range ast.Extensions {
- def := schema.Types[ext.Name]
- if def == nil {
- return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because it does not exist.", ext.Name)
- }
-
- if def.Kind != ext.Kind {
- return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because the base type is a %s, not %s.", ext.Name, def.Kind, ext.Kind)
- }
-
- def.Directives = append(def.Directives, ext.Directives...)
- def.Interfaces = append(def.Interfaces, ext.Interfaces...)
- def.Fields = append(def.Fields, ext.Fields...)
- def.Types = append(def.Types, ext.Types...)
- def.EnumValues = append(def.EnumValues, ext.EnumValues...)
- }
-
- for _, def := range ast.Definitions {
- switch def.Kind {
- case Union:
- for _, t := range def.Types {
- schema.AddPossibleType(def.Name, schema.Types[t])
- schema.AddImplements(t, def)
- }
- case InputObject, Object:
- for _, intf := range def.Interfaces {
- schema.AddPossibleType(intf, def)
- schema.AddImplements(def.Name, schema.Types[intf])
- }
- schema.AddPossibleType(def.Name, def)
- }
- }
-
- for i, dir := range ast.Directives {
- if schema.Directives[dir.Name] != nil {
- return nil, gqlerror.ErrorPosf(dir.Position, "Cannot redeclare directive %s.", dir.Name)
- }
- schema.Directives[dir.Name] = ast.Directives[i]
- }
-
- if len(ast.Schema) > 1 {
- return nil, gqlerror.ErrorPosf(ast.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.")
- }
-
- if len(ast.Schema) == 1 {
- for _, entrypoint := range ast.Schema[0].OperationTypes {
- def := schema.Types[entrypoint.Type]
- if def == nil {
- return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
- }
- switch entrypoint.Operation {
- case Query:
- schema.Query = def
- case Mutation:
- schema.Mutation = def
- case Subscription:
- schema.Subscription = def
- }
- }
- }
-
- for _, ext := range ast.SchemaExtension {
- for _, entrypoint := range ext.OperationTypes {
- def := schema.Types[entrypoint.Type]
- if def == nil {
- return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
- }
- switch entrypoint.Operation {
- case Query:
- schema.Query = def
- case Mutation:
- schema.Mutation = def
- case Subscription:
- schema.Subscription = def
- }
- }
- }
-
- for _, typ := range schema.Types {
- err := validateDefinition(&schema, typ)
- if err != nil {
- return nil, err
- }
- }
-
- for _, dir := range schema.Directives {
- err := validateDirective(&schema, dir)
- if err != nil {
- return nil, err
- }
- }
-
- if schema.Query == nil && schema.Types["Query"] != nil {
- schema.Query = schema.Types["Query"]
- }
-
- if schema.Mutation == nil && schema.Types["Mutation"] != nil {
- schema.Mutation = schema.Types["Mutation"]
- }
-
- if schema.Subscription == nil && schema.Types["Subscription"] != nil {
- schema.Subscription = schema.Types["Subscription"]
- }
-
- if schema.Query != nil {
- schema.Query.Fields = append(
- schema.Query.Fields,
- &FieldDefinition{
- Name: "__schema",
- Type: NonNullNamedType("__Schema", nil),
- },
- &FieldDefinition{
- Name: "__type",
- Type: NonNullNamedType("__Type", nil),
- Arguments: ArgumentDefinitionList{
- {Name: "name", Type: NamedType("String", nil)},
- },
- },
- )
- }
-
- return &schema, nil
-}
-
-func validateDirective(schema *Schema, def *DirectiveDefinition) *gqlerror.Error {
- if err := validateName(def.Position, def.Name); err != nil {
- // currently, the GraphQL spec does not reserve any directive names
- return err
- }
-
- return validateArgs(schema, def.Arguments, def)
-}
-
-func validateDefinition(schema *Schema, def *Definition) *gqlerror.Error {
- for _, field := range def.Fields {
- if err := validateName(field.Position, field.Name); err != nil {
- // currently, the GraphQL spec does not reserve any field names
- return err
- }
- if err := validateTypeRef(schema, field.Type); err != nil {
- return err
- }
- if err := validateArgs(schema, field.Arguments, nil); err != nil {
- return err
- }
- if err := validateDirectives(schema, field.Directives, nil); err != nil {
- return err
- }
- }
-
- for _, intf := range def.Interfaces {
- intDef := schema.Types[intf]
- if intDef == nil {
- return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intf))
- }
- if intDef.Kind != Interface {
- return gqlerror.ErrorPosf(def.Position, "%s is a non interface type %s.", strconv.Quote(intf), intDef.Kind)
- }
- }
-
- switch def.Kind {
- case Object, Interface:
- if len(def.Fields) == 0 {
- return gqlerror.ErrorPosf(def.Position, "%s must define one or more fields.", def.Kind)
- }
- case Enum:
- if len(def.EnumValues) == 0 {
- return gqlerror.ErrorPosf(def.Position, "%s must define one or more unique enum values.", def.Kind)
- }
- case InputObject:
- if len(def.Fields) == 0 {
- return gqlerror.ErrorPosf(def.Position, "%s must define one or more input fields.", def.Kind)
- }
- }
-
- for idx, field1 := range def.Fields {
- for _, field2 := range def.Fields[idx+1:] {
- if field1.Name == field2.Name {
- return gqlerror.ErrorPosf(field2.Position, "Field %s.%s can only be defined once.", def.Name, field2.Name)
- }
- }
- }
-
- if !def.BuiltIn {
- // the GraphQL spec reserves a number of type names
- err := validateName(def.Position, def.Name)
- if err != nil {
- return err
- }
- }
-
- return validateDirectives(schema, def.Directives, nil)
-}
-
-func validateTypeRef(schema *Schema, typ *Type) *gqlerror.Error {
- if schema.Types[typ.Name()] == nil {
- return gqlerror.ErrorPosf(typ.Position, "Undefined type %s.", typ.Name())
- }
- return nil
-}
-
-func validateArgs(schema *Schema, args ArgumentDefinitionList, currentDirective *DirectiveDefinition) *gqlerror.Error {
- for _, arg := range args {
- if err := validateName(arg.Position, arg.Name); err != nil {
- // currently, the GraphQL spec does not reserve any argument names
- return err
- }
- if err := validateTypeRef(schema, arg.Type); err != nil {
- return err
- }
- if err := validateDirectives(schema, arg.Directives, currentDirective); err != nil {
- return err
- }
- }
- return nil
-}
-
-func validateDirectives(schema *Schema, dirs DirectiveList, currentDirective *DirectiveDefinition) *gqlerror.Error {
- for _, dir := range dirs {
- if err := validateName(dir.Position, dir.Name); err != nil {
- // currently, the GraphQL spec does not reserve any directive names
- return err
- }
- if currentDirective != nil && dir.Name == currentDirective.Name {
- return gqlerror.ErrorPosf(dir.Position, "Directive %s cannot refer to itself.", currentDirective.Name)
- }
- if schema.Directives[dir.Name] == nil {
- return gqlerror.ErrorPosf(dir.Position, "Undefined directive %s.", dir.Name)
- }
- dir.Definition = schema.Directives[dir.Name]
- }
- return nil
-}
-
-func validateName(pos *Position, name string) *gqlerror.Error {
- if strings.HasPrefix(name, "__") {
- return gqlerror.ErrorPosf(pos, `Name "%s" must not begin with "__", which is reserved by GraphQL introspection.`, name)
- }
- return nil
-}
diff --git a/vendor/github.com/vektah/gqlparser/validator/schema_test.yml b/vendor/github.com/vektah/gqlparser/validator/schema_test.yml
deleted file mode 100644
index abc8dd7e..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/schema_test.yml
+++ /dev/null
@@ -1,323 +0,0 @@
-types:
- - name: cannot be redeclared
- input: |
- type A {
- name: String
- }
- type A {
- name: String
- }
- error:
- message: "Cannot redeclare type A."
- locations: [{line: 4, column: 6}]
- - name: cannot be duplicated field at same definition 1
- input: |
- type A {
- name: String
- name: String
- }
- error:
- message: "Field A.name can only be defined once."
- locations: [{line: 3, column: 3}]
- - name: cannot be duplicated field at same definition 2
- input: |
- type A {
- name: String
- }
- extend type A {
- name: String
- }
- error:
- message: "Field A.name can only be defined once."
- locations: [{line: 5, column: 3}]
- - name: cannot be duplicated field at same definition 3
- input: |
- type A {
- name: String
- }
- extend type A {
- age: Int
- age: Int
- }
- error:
- message: "Field A.age can only be defined once."
- locations: [{line: 6, column: 3}]
-
-object types:
- - name: must define one or more fields
- input: |
- directive @D on OBJECT
-
- # This pattern is rejected by the parser
- # type InvalidObject1 {}
-
- type InvalidObject2 @D
-
- type ValidObject {
- id: ID
- }
- extend type ValidObject @D
- extend type ValidObject {
- b: Int
- }
- error:
- message: 'OBJECT must define one or more fields.'
- locations: [{line: 6, column: 6}]
- - name: check reserved names on type name
- input: |
- type __FooBar {
- id: ID
- }
- error:
- message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 6}]
- - name: check reserved names on type field
- input: |
- type FooBar {
- __id: ID
- }
- error:
- message: 'Name "__id" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 2, column: 3}]
-
- - name: check reserved names on type field argument
- input: |
- type FooBar {
- foo(__bar: ID): ID
- }
- error:
- message: 'Name "__bar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 2, column: 7}]
-
-interfaces:
- - name: must exist
- input: |
- type Thing implements Object {
- id: ID!
- }
-
- type Query {
- Things: [Thing!]!
- }
- error:
- message: 'Undefined type "Object".'
- locations: [{line: 1, column: 6}]
-
- - name: must be an interface
- input: |
- type Thing implements Object {
- id: ID!
- }
-
- type Query {
- Things: [Thing!]!
- }
-
- type Object {
- name: String
- }
- error:
- message: '"Object" is a non interface type OBJECT.'
- locations: [{line: 1, column: 6}]
-
- - name: must define one or more fields
- input: |
- directive @D on INTERFACE
-
- # This pattern is rejected by the parser
- # interface InvalidInterface1 {}
-
- interface InvalidInterface2 @D
-
- interface ValidInterface {
- id: ID
- }
- extend interface ValidInterface @D
- extend interface ValidInterface {
- b: Int
- }
- error:
- message: 'INTERFACE must define one or more fields.'
- locations: [{line: 6, column: 11}]
- - name: check reserved names on type name
- input: |
- interface __FooBar {
- id: ID
- }
- error:
- message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 11}]
-
-inputs:
- - name: must define one or more input fields
- input: |
- directive @D on INPUT_OBJECT
-
- # This pattern is rejected by the parser
- # input InvalidInput1 {}
-
- input InvalidInput2 @D
-
- input ValidInput {
- id: ID
- }
- extend input ValidInput @D
- extend input ValidInput {
- b: Int
- }
- error:
- message: 'INPUT_OBJECT must define one or more input fields.'
- locations: [{line: 6, column: 7}]
- - name: check reserved names on type name
- input: |
- input __FooBar {
- id: ID
- }
- error:
- message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 7}]
-
-enums:
- - name: must define one or more unique enum values
- input: |
- directive @D on ENUM
-
- # This pattern is rejected by the parser
- # enum InvalidEnum1 {}
-
- enum InvalidEnum2 @D
-
- enum ValidEnum {
- FOO
- }
- extend enum ValidEnum @D
- extend enum ValidEnum {
- BAR
- }
- error:
- message: 'ENUM must define one or more unique enum values.'
- locations: [{line: 6, column: 6}]
- - name: check reserved names on type name
- input: |
- enum __FooBar {
- A
- B
- }
- error:
- message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 6}]
-
-type extensions:
- - name: cannot extend non-existent types
- input: |
- extend type A {
- name: String
- }
- error:
- message: "Cannot extend type A because it does not exist."
- locations: [{line: 1, column: 13}]
-
- - name: cannot extend a type of the wrong kind
- input: |
- scalar A
- extend type A {
- name: String
- }
- error:
- message: "Cannot extend type A because the base type is a SCALAR, not OBJECT."
- locations: [{line: 2, column: 13}]
-
-directives:
- - name: cannot redeclare directives
- input: |
- directive @A on FIELD_DEFINITION
- directive @A on FIELD_DEFINITION
- error:
- message: "Cannot redeclare directive A."
- locations: [{line: 2, column: 12}]
-
- - name: must be declared
- input: |
- type User {
- name: String @foo
- }
- error:
- message: "Undefined directive foo."
- locations: [{line: 2, column: 17}]
-
- - name: cannot be self-referential
- input: |
- directive @A(foo: Int! @A) on FIELD_DEFINITION
- error:
- message: "Directive A cannot refer to itself."
- locations: [{line: 1, column: 25}]
- - name: check reserved names on type name
- input: |
- directive @__A on FIELD_DEFINITION
- error:
- message: 'Name "__A" must not begin with "__", which is reserved by GraphQL introspection.'
- locations: [{line: 1, column: 12}]
-
-entry points:
- - name: multiple schema entry points
- input: |
- schema {
- query: Query
- }
- schema {
- query: Query
- }
- scalar Query
- error:
- message: "Cannot have multiple schema entry points, consider schema extensions instead."
- locations: [{line: 4, column: 8}]
-
- - name: Undefined schema entrypoint
- input: |
- schema {
- query: Query
- }
- error:
- message: "Schema root query refers to a type Query that does not exist."
- locations: [{line: 2, column: 3}]
-
-entry point extensions:
- - name: Undefined schema entrypoint
- input: |
- schema {
- query: Query
- }
- scalar Query
- extend schema {
- mutation: Mutation
- }
- error:
- message: "Schema root mutation refers to a type Mutation that does not exist."
- locations: [{line: 6, column: 3}]
-
-type references:
- - name: Field types
- input: |
- type User {
- posts: Post
- }
- error:
- message: "Undefined type Post."
- locations: [{line: 2, column: 10}]
-
- - name: Arg types
- input: |
- type User {
- posts(foo: FooBar): String
- }
- error:
- message: "Undefined type FooBar."
- locations: [{line: 2, column: 14}]
-
- - name: Directive arg types
- input: |
- directive @Foo(foo: FooBar) on FIELD_DEFINITION
-
- error:
- message: "Undefined type FooBar."
- locations: [{line: 1, column: 21}]
diff --git a/vendor/github.com/vektah/gqlparser/validator/suggestionList.go b/vendor/github.com/vektah/gqlparser/validator/suggestionList.go
deleted file mode 100644
index f58d0fc2..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/suggestionList.go
+++ /dev/null
@@ -1,69 +0,0 @@
-package validator
-
-import (
- "sort"
- "strings"
-
- "github.com/agnivade/levenshtein"
-)
-
-// Given an invalid input string and a list of valid options, returns a filtered
-// list of valid options sorted based on their similarity with the input.
-func SuggestionList(input string, options []string) []string {
- var results []string
- optionsByDistance := map[string]int{}
-
- for _, option := range options {
- distance := lexicalDistance(input, option)
- threshold := calcThreshold(input, option)
- if distance <= threshold {
- results = append(results, option)
- optionsByDistance[option] = distance
- }
- }
-
- sort.Slice(results, func(i, j int) bool {
- return optionsByDistance[results[i]] < optionsByDistance[results[j]]
- })
- return results
-}
-
-func calcThreshold(a, b string) (threshold int) {
- if len(a) >= len(b) {
- threshold = len(a) / 2
- } else {
- threshold = len(b) / 2
- }
- if threshold < 1 {
- threshold = 1
- }
- return
-}
-
-// Computes the lexical distance between strings A and B.
-//
-// The "distance" between two strings is given by counting the minimum number
-// of edits needed to transform string A into string B. An edit can be an
-// insertion, deletion, or substitution of a single character, or a swap of two
-// adjacent characters.
-//
-// Includes a custom alteration from Damerau-Levenshtein to treat case changes
-// as a single edit which helps identify mis-cased values with an edit distance
-// of 1.
-//
- // This distance can be useful for detecting typos in input or sorting options by similarity.
-func lexicalDistance(a, b string) int {
- if a == b {
- return 0
- }
-
- a = strings.ToLower(a)
- b = strings.ToLower(b)
-
- // Any case change counts as a single edit
- if a == b {
- return 1
- }
-
- return levenshtein.ComputeDistance(a, b)
-}
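
The doc comments in the file above describe how SuggestionList filters candidate strings by lexical distance and returns the survivors ordered by closeness to the input. A minimal, hypothetical sketch of calling the helper from this (pre-modules) vendored package; the misspelled input and the option list are invented for the example:

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/validator"
)

func main() {
	// Hypothetical candidates for a misspelled field name.
	options := []string{"name", "email", "age"}

	// Only options within the length-based distance threshold survive,
	// ordered by similarity to the input; here that should be just "name".
	fmt.Println(validator.SuggestionList("nmae", options))
	// [name]
}

The length-based threshold in calcThreshold is what keeps wildly different options out of the suggestion list while still tolerating short typos.
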
diff --git a/vendor/github.com/vektah/gqlparser/validator/validator.go b/vendor/github.com/vektah/gqlparser/validator/validator.go
deleted file mode 100644
index bbacec6f..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/validator.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package validator
-
-import (
- . "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-type AddErrFunc func(options ...ErrorOption)
-
-type ruleFunc func(observers *Events, addError AddErrFunc)
-
-type rule struct {
- name string
- rule ruleFunc
-}
-
-var rules []rule
-
- // AddRule adds a rule to the rule set.
-// f is called once each time `Validate` is executed.
-func AddRule(name string, f ruleFunc) {
- rules = append(rules, rule{name: name, rule: f})
-}
-
-func Validate(schema *Schema, doc *QueryDocument) gqlerror.List {
- var errs gqlerror.List
-
- observers := &Events{}
- for i := range rules {
- rule := rules[i]
- rule.rule(observers, func(options ...ErrorOption) {
- err := &gqlerror.Error{
- Rule: rule.name,
- }
- for _, o := range options {
- o(err)
- }
- errs = append(errs, err)
- })
- }
-
- Walk(schema, doc, observers)
- return errs
-}
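
AddRule and Validate above are the registration/dispatch pair that every rules/*.go file deleted earlier in this diff plugs into from its init function. A hedged sketch of the same pattern against this vendored API; the package name, rule name, and the "secret" check are invented for illustration:

package customrules

import (
	"github.com/vektah/gqlparser/ast"
	"github.com/vektah/gqlparser/validator"
)

func init() {
	// Hypothetical rule: reject any selection of a field literally named "secret".
	validator.AddRule("NoSecretFields", func(observers *validator.Events, addError validator.AddErrFunc) {
		observers.OnField(func(walker *validator.Walker, field *ast.Field) {
			if field.Name != "secret" {
				return
			}
			addError(
				validator.Message(`Field "%s" may not be queried.`, field.Name),
				validator.At(field.Position),
			)
		})
	})
}

Once registered, the rule runs on every subsequent Validate call: Validate iterates the package-level rules slice, wires each rule's observers into a single Events value, and performs one Walk over the document.
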
diff --git a/vendor/github.com/vektah/gqlparser/validator/vars.go b/vendor/github.com/vektah/gqlparser/validator/vars.go
deleted file mode 100644
index aaf3a0d1..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/vars.go
+++ /dev/null
@@ -1,199 +0,0 @@
-package validator
-
-import (
- "reflect"
-
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-var UnexpectedType = fmt.Errorf("Unexpected Type")
-
-// VariableValues coerces and validates variable values
-func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, *gqlerror.Error) {
- coercedVars := map[string]interface{}{}
-
- validator := varValidator{
- path: []interface{}{"variable"},
- schema: schema,
- }
-
- for _, v := range op.VariableDefinitions {
- validator.path = append(validator.path, v.Variable)
-
- if !v.Definition.IsInputType() {
- return nil, gqlerror.ErrorPathf(validator.path, "must an input type")
- }
-
- val, hasValue := variables[v.Variable]
- if !hasValue {
- if v.DefaultValue != nil {
- var err error
- val, err = v.DefaultValue.Value(nil)
- if err != nil {
- return nil, gqlerror.WrapPath(validator.path, err)
- }
- hasValue = true
- } else if v.Type.NonNull {
- return nil, gqlerror.ErrorPathf(validator.path, "must be defined")
- }
- }
-
- if hasValue {
- if val == nil {
- if v.Type.NonNull {
- return nil, gqlerror.ErrorPathf(validator.path, "cannot be null")
- }
- coercedVars[v.Variable] = nil
- } else {
- rv := reflect.ValueOf(val)
- if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface {
- rv = rv.Elem()
- }
-
- if err := validator.validateVarType(v.Type, rv); err != nil {
- return nil, err
- }
-
- coercedVars[v.Variable] = val
- }
- }
-
- validator.path = validator.path[0 : len(validator.path)-1]
- }
-
- return coercedVars, nil
-}
-
-type varValidator struct {
- path []interface{}
- schema *ast.Schema
-}
-
-func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) *gqlerror.Error {
- currentPath := v.path
- resetPath := func() {
- v.path = currentPath
- }
- defer resetPath()
-
- if typ.Elem != nil {
- if val.Kind() != reflect.Slice {
- return gqlerror.ErrorPathf(v.path, "must be an array")
- }
-
- for i := 0; i < val.Len(); i++ {
- resetPath()
- v.path = append(v.path, i)
- field := val.Index(i)
-
- if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
- if typ.Elem.NonNull && field.IsNil() {
- return gqlerror.ErrorPathf(v.path, "cannot be null")
- }
- field = field.Elem()
- }
-
- if err := v.validateVarType(typ.Elem, field); err != nil {
- return err
- }
- }
-
- return nil
- }
-
- def := v.schema.Types[typ.NamedType]
- if def == nil {
- panic(fmt.Errorf("missing def for %s", typ.NamedType))
- }
-
- switch def.Kind {
- case ast.Enum:
- kind := val.Type().Kind()
- if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
- return nil
- }
- return gqlerror.ErrorPathf(v.path, "enums must be ints or strings")
- case ast.Scalar:
- kind := val.Type().Kind()
- switch typ.NamedType {
- case "Int":
- if kind == reflect.String || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 {
- return nil
- }
- case "Float":
- if kind == reflect.String || kind == reflect.Float32 || kind == reflect.Float64 || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 {
- return nil
- }
- case "String":
- if kind == reflect.String {
- return nil
- }
-
- case "Boolean":
- if kind == reflect.Bool {
- return nil
- }
-
- case "ID":
- if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
- return nil
- }
- default:
- // assume custom scalars are ok
- return nil
- }
- return gqlerror.ErrorPathf(v.path, "cannot use %s as %s", kind.String(), typ.NamedType)
- case ast.InputObject:
- if val.Kind() != reflect.Map {
- return gqlerror.ErrorPathf(v.path, "must be a %s", def.Name)
- }
-
- // check for unknown fields
- for _, name := range val.MapKeys() {
- val.MapIndex(name)
- fieldDef := def.Fields.ForName(name.String())
- resetPath()
- v.path = append(v.path, name.String())
-
- if fieldDef == nil {
- return gqlerror.ErrorPathf(v.path, "unknown field")
- }
- }
-
- for _, fieldDef := range def.Fields {
- resetPath()
- v.path = append(v.path, fieldDef.Name)
-
- field := val.MapIndex(reflect.ValueOf(fieldDef.Name))
- if !field.IsValid() {
- if fieldDef.Type.NonNull {
- return gqlerror.ErrorPathf(v.path, "must be defined")
- }
- continue
- }
-
- if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
- if fieldDef.Type.NonNull && field.IsNil() {
- return gqlerror.ErrorPathf(v.path, "cannot be null")
- }
- // allow null object field and skip it
- if !fieldDef.Type.NonNull && field.IsNil() {
- continue
- }
- field = field.Elem()
- }
-
- err := v.validateVarType(fieldDef.Type, field)
- if err != nil {
- return err
- }
- }
- default:
- panic(fmt.Errorf("unsupported type %s", def.Kind))
- }
-
- return nil
-}
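
VariableValues above coerces a raw, client-supplied variables map against an operation's variable definitions. A minimal sketch of driving it end to end, assuming the MustLoadSchema/MustLoadQuery helpers from the top-level gqlparser package and an invented one-field schema:

package main

import (
	"fmt"

	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
	"github.com/vektah/gqlparser/validator"
)

func main() {
	schema := gqlparser.MustLoadSchema(&ast.Source{Input: `
		type Query {
			user(id: ID!): String
		}
	`})

	// Parsing and validating the query also annotates the AST, so the
	// variable definitions carry their type Definition by the time
	// VariableValues inspects them.
	doc := gqlparser.MustLoadQuery(schema, `query GetUser($id: ID!) { user(id: $id) }`)

	vars, gqlErr := validator.VariableValues(schema, doc.Operations[0], map[string]interface{}{
		"id": "42",
	})
	if gqlErr != nil {
		panic(gqlErr)
	}
	fmt.Println(vars) // map[id:42]
}

The nil/undefined handling mirrors the spec's coercion rules: a missing non-null variable without a default is an error, while a missing nullable one is simply left out of the coerced map.
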
diff --git a/vendor/github.com/vektah/gqlparser/validator/walk.go b/vendor/github.com/vektah/gqlparser/validator/walk.go
deleted file mode 100644
index 751ba1f1..00000000
--- a/vendor/github.com/vektah/gqlparser/validator/walk.go
+++ /dev/null
@@ -1,286 +0,0 @@
-package validator
-
-import (
- "context"
- "fmt"
-
- "github.com/vektah/gqlparser/ast"
-)
-
-type Events struct {
- operationVisitor []func(walker *Walker, operation *ast.OperationDefinition)
- field []func(walker *Walker, field *ast.Field)
- fragment []func(walker *Walker, fragment *ast.FragmentDefinition)
- inlineFragment []func(walker *Walker, inlineFragment *ast.InlineFragment)
- fragmentSpread []func(walker *Walker, fragmentSpread *ast.FragmentSpread)
- directive []func(walker *Walker, directive *ast.Directive)
- directiveList []func(walker *Walker, directives []*ast.Directive)
- value []func(walker *Walker, value *ast.Value)
-}
-
-func (o *Events) OnOperation(f func(walker *Walker, operation *ast.OperationDefinition)) {
- o.operationVisitor = append(o.operationVisitor, f)
-}
-func (o *Events) OnField(f func(walker *Walker, field *ast.Field)) {
- o.field = append(o.field, f)
-}
-func (o *Events) OnFragment(f func(walker *Walker, fragment *ast.FragmentDefinition)) {
- o.fragment = append(o.fragment, f)
-}
-func (o *Events) OnInlineFragment(f func(walker *Walker, inlineFragment *ast.InlineFragment)) {
- o.inlineFragment = append(o.inlineFragment, f)
-}
-func (o *Events) OnFragmentSpread(f func(walker *Walker, fragmentSpread *ast.FragmentSpread)) {
- o.fragmentSpread = append(o.fragmentSpread, f)
-}
-func (o *Events) OnDirective(f func(walker *Walker, directive *ast.Directive)) {
- o.directive = append(o.directive, f)
-}
-func (o *Events) OnDirectiveList(f func(walker *Walker, directives []*ast.Directive)) {
- o.directiveList = append(o.directiveList, f)
-}
-func (o *Events) OnValue(f func(walker *Walker, value *ast.Value)) {
- o.value = append(o.value, f)
-}
-
-func Walk(schema *ast.Schema, document *ast.QueryDocument, observers *Events) {
- w := Walker{
- Observers: observers,
- Schema: schema,
- Document: document,
- }
-
- w.walk()
-}
-
-type Walker struct {
- Context context.Context
- Observers *Events
- Schema *ast.Schema
- Document *ast.QueryDocument
-
- validatedFragmentSpreads map[string]bool
- CurrentOperation *ast.OperationDefinition
-}
-
-func (w *Walker) walk() {
- for _, child := range w.Document.Operations {
- w.validatedFragmentSpreads = make(map[string]bool)
- w.walkOperation(child)
- }
- for _, child := range w.Document.Fragments {
- w.validatedFragmentSpreads = make(map[string]bool)
- w.walkFragment(child)
- }
-}
-
-func (w *Walker) walkOperation(operation *ast.OperationDefinition) {
- w.CurrentOperation = operation
- for _, varDef := range operation.VariableDefinitions {
- varDef.Definition = w.Schema.Types[varDef.Type.Name()]
-
- if varDef.DefaultValue != nil {
- varDef.DefaultValue.ExpectedType = varDef.Type
- varDef.DefaultValue.Definition = w.Schema.Types[varDef.Type.Name()]
- }
- }
-
- var def *ast.Definition
- var loc ast.DirectiveLocation
- switch operation.Operation {
- case ast.Query, "":
- def = w.Schema.Query
- loc = ast.LocationQuery
- case ast.Mutation:
- def = w.Schema.Mutation
- loc = ast.LocationMutation
- case ast.Subscription:
- def = w.Schema.Subscription
- loc = ast.LocationSubscription
- }
-
- w.walkDirectives(def, operation.Directives, loc)
-
- for _, varDef := range operation.VariableDefinitions {
- if varDef.DefaultValue != nil {
- w.walkValue(varDef.DefaultValue)
- }
- }
-
- w.walkSelectionSet(def, operation.SelectionSet)
-
- for _, v := range w.Observers.operationVisitor {
- v(w, operation)
- }
- w.CurrentOperation = nil
-}
-
-func (w *Walker) walkFragment(it *ast.FragmentDefinition) {
- def := w.Schema.Types[it.TypeCondition]
-
- it.Definition = def
-
- w.walkDirectives(def, it.Directives, ast.LocationFragmentDefinition)
- w.walkSelectionSet(def, it.SelectionSet)
-
- for _, v := range w.Observers.fragment {
- v(w, it)
- }
-}
-
-func (w *Walker) walkDirectives(parentDef *ast.Definition, directives []*ast.Directive, location ast.DirectiveLocation) {
- for _, dir := range directives {
- def := w.Schema.Directives[dir.Name]
- dir.Definition = def
- dir.ParentDefinition = parentDef
- dir.Location = location
-
- for _, arg := range dir.Arguments {
- var argDef *ast.ArgumentDefinition
- if def != nil {
- argDef = def.Arguments.ForName(arg.Name)
- }
-
- w.walkArgument(argDef, arg)
- }
-
- for _, v := range w.Observers.directive {
- v(w, dir)
- }
- }
-
- for _, v := range w.Observers.directiveList {
- v(w, directives)
- }
-}
-
-func (w *Walker) walkValue(value *ast.Value) {
- if value.Kind == ast.Variable && w.CurrentOperation != nil {
- value.VariableDefinition = w.CurrentOperation.VariableDefinitions.ForName(value.Raw)
- if value.VariableDefinition != nil {
- value.VariableDefinition.Used = true
- }
- }
-
- if value.Kind == ast.ObjectValue {
- for _, child := range value.Children {
- if value.Definition != nil {
- fieldDef := value.Definition.Fields.ForName(child.Name)
- if fieldDef != nil {
- child.Value.ExpectedType = fieldDef.Type
- child.Value.Definition = w.Schema.Types[fieldDef.Type.Name()]
- }
- }
- w.walkValue(child.Value)
- }
- }
-
- if value.Kind == ast.ListValue {
- for _, child := range value.Children {
- if value.ExpectedType != nil && value.ExpectedType.Elem != nil {
- child.Value.ExpectedType = value.ExpectedType.Elem
- child.Value.Definition = value.Definition
- }
-
- w.walkValue(child.Value)
- }
- }
-
- for _, v := range w.Observers.value {
- v(w, value)
- }
-}
-
-func (w *Walker) walkArgument(argDef *ast.ArgumentDefinition, arg *ast.Argument) {
- if argDef != nil {
- arg.Value.ExpectedType = argDef.Type
- arg.Value.Definition = w.Schema.Types[argDef.Type.Name()]
- }
-
- w.walkValue(arg.Value)
-}
-
-func (w *Walker) walkSelectionSet(parentDef *ast.Definition, it ast.SelectionSet) {
- for _, child := range it {
- w.walkSelection(parentDef, child)
- }
-}
-
-func (w *Walker) walkSelection(parentDef *ast.Definition, it ast.Selection) {
- switch it := it.(type) {
- case *ast.Field:
- var def *ast.FieldDefinition
- if it.Name == "__typename" {
- def = &ast.FieldDefinition{
- Name: "__typename",
- Type: ast.NamedType("String", nil),
- }
- } else if parentDef != nil {
- def = parentDef.Fields.ForName(it.Name)
- }
-
- it.Definition = def
- it.ObjectDefinition = parentDef
-
- var nextParentDef *ast.Definition
- if def != nil {
- nextParentDef = w.Schema.Types[def.Type.Name()]
- }
-
- for _, arg := range it.Arguments {
- var argDef *ast.ArgumentDefinition
- if def != nil {
- argDef = def.Arguments.ForName(arg.Name)
- }
-
- w.walkArgument(argDef, arg)
- }
-
- w.walkDirectives(nextParentDef, it.Directives, ast.LocationField)
- w.walkSelectionSet(nextParentDef, it.SelectionSet)
-
- for _, v := range w.Observers.field {
- v(w, it)
- }
-
- case *ast.InlineFragment:
- it.ObjectDefinition = parentDef
-
- nextParentDef := parentDef
- if it.TypeCondition != "" {
- nextParentDef = w.Schema.Types[it.TypeCondition]
- }
-
- w.walkDirectives(nextParentDef, it.Directives, ast.LocationInlineFragment)
- w.walkSelectionSet(nextParentDef, it.SelectionSet)
-
- for _, v := range w.Observers.inlineFragment {
- v(w, it)
- }
-
- case *ast.FragmentSpread:
- def := w.Document.Fragments.ForName(it.Name)
- it.Definition = def
- it.ObjectDefinition = parentDef
-
- var nextParentDef *ast.Definition
- if def != nil {
- nextParentDef = w.Schema.Types[def.TypeCondition]
- }
-
- w.walkDirectives(nextParentDef, it.Directives, ast.LocationFragmentSpread)
-
- if def != nil && !w.validatedFragmentSpreads[def.Name] {
- // prevent infinite recursion
- w.validatedFragmentSpreads[def.Name] = true
- w.walkSelectionSet(nextParentDef, def.SelectionSet)
- }
-
- for _, v := range w.Observers.fragmentSpread {
- v(w, it)
- }
-
- default:
- panic(fmt.Errorf("unsupported %T", it))
- }
-}