aboutsummaryrefslogtreecommitdiffstats
path: root/vendor/github.com/vektah
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/github.com/vektah')
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/build.go165
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/codegen.go153
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/config.go184
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/enum.go12
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/enum_build.go39
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/import.go29
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/import_build.go116
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/input_build.go86
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/interface.go13
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/interface_build.go94
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/model.go15
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/models_build.go91
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/object.go206
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/object_build.go144
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/args.gotpl30
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/data.go11
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/field.gotpl80
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/generated.gotpl175
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/input.gotpl28
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/interface.gotpl18
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/models.gotpl65
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/object.gotpl52
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/templates/templates.go139
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/type.go162
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/type_build.go112
-rw-r--r--vendor/github.com/vektah/gqlgen/codegen/util.go312
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/bool.go30
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/context.go145
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/defer.go30
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/error.go46
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/exec.go118
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/float.go26
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/id.go33
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/int.go26
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/jsonw.go83
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/map.go24
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/oneshot.go14
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/recovery.go19
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/response.go18
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/string.go63
-rw-r--r--vendor/github.com/vektah/gqlgen/graphql/time.go21
-rw-r--r--vendor/github.com/vektah/gqlgen/handler/graphql.go235
-rw-r--r--vendor/github.com/vektah/gqlgen/handler/playground.go51
-rw-r--r--vendor/github.com/vektah/gqlgen/handler/stub.go45
-rw-r--r--vendor/github.com/vektah/gqlgen/handler/websocket.go245
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/LICENSE24
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/common/directive.go32
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/common/lexer.go122
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/common/literals.go206
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/common/types.go80
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/common/values.go77
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/errors/errors.go41
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/introspection/introspection.go313
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/introspection/query.go104
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/query/query.go261
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/schema/meta.go193
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/schema/schema.go489
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/validation/suggestion.go71
-rw-r--r--vendor/github.com/vektah/gqlgen/neelance/validation/validation.go861
-rw-r--r--vendor/github.com/vektah/gqlparser/.gitignore5
-rw-r--r--vendor/github.com/vektah/gqlparser/.gometalinter.json13
-rw-r--r--vendor/github.com/vektah/gqlparser/LICENSE (renamed from vendor/github.com/vektah/gqlgen/LICENSE)2
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/argmap.go37
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/collections.go138
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/definition.go92
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/directive.go42
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/document.go65
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/dumper.go159
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/fragment.go38
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/operation.go29
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/selection.go39
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/source.go14
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/type.go68
-rw-r--r--vendor/github.com/vektah/gqlparser/ast/value.go120
-rw-r--r--vendor/github.com/vektah/gqlparser/gqlerror/error.go133
-rw-r--r--vendor/github.com/vektah/gqlparser/gqlparser.go42
-rw-r--r--vendor/github.com/vektah/gqlparser/lexer/blockstring.go58
-rw-r--r--vendor/github.com/vektah/gqlparser/lexer/lexer.go510
-rw-r--r--vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml672
-rw-r--r--vendor/github.com/vektah/gqlparser/lexer/token.go148
-rw-r--r--vendor/github.com/vektah/gqlparser/parser/parser.go112
-rw-r--r--vendor/github.com/vektah/gqlparser/parser/query.go334
-rw-r--r--vendor/github.com/vektah/gqlparser/parser/query_test.yml507
-rw-r--r--vendor/github.com/vektah/gqlparser/parser/schema.go503
-rw-r--r--vendor/github.com/vektah/gqlparser/parser/schema_test.yml505
-rw-r--r--vendor/github.com/vektah/gqlparser/readme.md17
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/error.go55
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/imported/LICENSE (renamed from vendor/github.com/vektah/gqlgen/neelance/tests/testdata/LICENSE)0
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/messaging.go39
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/prelude.go5
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/prelude.graphql119
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go86
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go39
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go57
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go31
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go19
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go61
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go19
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go93
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go28
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go30
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go30
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go553
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go68
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go63
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go36
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go30
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go33
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go24
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go22
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go27
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go22
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go23
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go130
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go28
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go36
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/schema.go212
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/schema_test.yml152
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/suggestionList.go69
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/validator.go44
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/vars.go195
-rw-r--r--vendor/github.com/vektah/gqlparser/validator/walk.go286
122 files changed, 7165 insertions, 6678 deletions
diff --git a/vendor/github.com/vektah/gqlgen/codegen/build.go b/vendor/github.com/vektah/gqlgen/codegen/build.go
deleted file mode 100644
index d56fc06f..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/build.go
+++ /dev/null
@@ -1,165 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/build"
- "go/types"
- "os"
-
- "github.com/pkg/errors"
- "golang.org/x/tools/go/loader"
-)
-
-type Build struct {
- PackageName string
- Objects Objects
- Inputs Objects
- Interfaces []*Interface
- Imports []*Import
- QueryRoot *Object
- MutationRoot *Object
- SubscriptionRoot *Object
- SchemaRaw string
-}
-
-type ModelBuild struct {
- PackageName string
- Imports []*Import
- Models []Model
- Enums []Enum
-}
-
-// Create a list of models that need to be generated
-func (cfg *Config) models() (*ModelBuild, error) {
- namedTypes := cfg.buildNamedTypes()
-
- prog, err := cfg.loadProgram(namedTypes, true)
- if err != nil {
- return nil, errors.Wrap(err, "loading failed")
- }
- imports := buildImports(namedTypes, cfg.Model.Dir())
-
- cfg.bindTypes(imports, namedTypes, cfg.Model.Dir(), prog)
-
- models, err := cfg.buildModels(namedTypes, prog)
- if err != nil {
- return nil, err
- }
- return &ModelBuild{
- PackageName: cfg.Model.Package,
- Models: models,
- Enums: cfg.buildEnums(namedTypes),
- Imports: imports.finalize(),
- }, nil
-}
-
-// bind a schema together with some code to generate a Build
-func (cfg *Config) bind() (*Build, error) {
- namedTypes := cfg.buildNamedTypes()
-
- prog, err := cfg.loadProgram(namedTypes, true)
- if err != nil {
- return nil, errors.Wrap(err, "loading failed")
- }
-
- imports := buildImports(namedTypes, cfg.Exec.Dir())
- cfg.bindTypes(imports, namedTypes, cfg.Exec.Dir(), prog)
-
- objects, err := cfg.buildObjects(namedTypes, prog, imports)
- if err != nil {
- return nil, err
- }
-
- inputs, err := cfg.buildInputs(namedTypes, prog, imports)
- if err != nil {
- return nil, err
- }
-
- b := &Build{
- PackageName: cfg.Exec.Package,
- Objects: objects,
- Interfaces: cfg.buildInterfaces(namedTypes, prog),
- Inputs: inputs,
- Imports: imports.finalize(),
- SchemaRaw: cfg.SchemaStr,
- }
-
- if qr, ok := cfg.schema.EntryPoints["query"]; ok {
- b.QueryRoot = b.Objects.ByName(qr.TypeName())
- }
-
- if mr, ok := cfg.schema.EntryPoints["mutation"]; ok {
- b.MutationRoot = b.Objects.ByName(mr.TypeName())
- }
-
- if sr, ok := cfg.schema.EntryPoints["subscription"]; ok {
- b.SubscriptionRoot = b.Objects.ByName(sr.TypeName())
- }
-
- if b.QueryRoot == nil {
- return b, fmt.Errorf("query entry point missing")
- }
-
- // Poke a few magic methods into query
- q := b.Objects.ByName(b.QueryRoot.GQLType)
- q.Fields = append(q.Fields, Field{
- Type: &Type{namedTypes["__Schema"], []string{modPtr}, nil},
- GQLName: "__schema",
- NoErr: true,
- GoMethodName: "ec.introspectSchema",
- Object: q,
- })
- q.Fields = append(q.Fields, Field{
- Type: &Type{namedTypes["__Type"], []string{modPtr}, nil},
- GQLName: "__type",
- NoErr: true,
- GoMethodName: "ec.introspectType",
- Args: []FieldArgument{
- {GQLName: "name", Type: &Type{namedTypes["String"], []string{}, nil}, Object: &Object{}},
- },
- Object: q,
- })
-
- return b, nil
-}
-
-func (cfg *Config) validate() error {
- namedTypes := cfg.buildNamedTypes()
-
- _, err := cfg.loadProgram(namedTypes, false)
- return err
-}
-
-func (cfg *Config) loadProgram(namedTypes NamedTypes, allowErrors bool) (*loader.Program, error) {
- conf := loader.Config{}
- if allowErrors {
- conf = loader.Config{
- AllowErrors: true,
- TypeChecker: types.Config{
- Error: func(e error) {},
- },
- }
- }
- for _, imp := range ambientImports {
- conf.Import(imp)
- }
-
- for _, imp := range namedTypes {
- if imp.Package != "" {
- conf.Import(imp.Package)
- }
- }
-
- return conf.Load()
-}
-
-func resolvePkg(pkgName string) (string, error) {
- cwd, _ := os.Getwd()
-
- pkg, err := build.Default.Import(pkgName, cwd, build.FindOnly)
- if err != nil {
- return "", err
- }
-
- return pkg.ImportPath, nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/codegen.go b/vendor/github.com/vektah/gqlgen/codegen/codegen.go
deleted file mode 100644
index 789ef2ec..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/codegen.go
+++ /dev/null
@@ -1,153 +0,0 @@
-package codegen
-
-import (
- "bytes"
- "fmt"
- "io/ioutil"
- "os"
- "path/filepath"
- "regexp"
- "syscall"
-
- "github.com/pkg/errors"
- "github.com/vektah/gqlgen/codegen/templates"
- "github.com/vektah/gqlgen/neelance/schema"
- "golang.org/x/tools/imports"
-)
-
-func Generate(cfg Config) error {
- if err := cfg.normalize(); err != nil {
- return err
- }
-
- _ = syscall.Unlink(cfg.Exec.Filename)
- _ = syscall.Unlink(cfg.Model.Filename)
-
- modelsBuild, err := cfg.models()
- if err != nil {
- return errors.Wrap(err, "model plan failed")
- }
- if len(modelsBuild.Models) > 0 || len(modelsBuild.Enums) > 0 {
- var buf *bytes.Buffer
- buf, err = templates.Run("models.gotpl", modelsBuild)
- if err != nil {
- return errors.Wrap(err, "model generation failed")
- }
-
- if err = write(cfg.Model.Filename, buf.Bytes()); err != nil {
- return err
- }
- for _, model := range modelsBuild.Models {
- modelCfg := cfg.Models[model.GQLType]
- modelCfg.Model = cfg.Model.ImportPath() + "." + model.GoType
- cfg.Models[model.GQLType] = modelCfg
- }
-
- for _, enum := range modelsBuild.Enums {
- modelCfg := cfg.Models[enum.GQLType]
- modelCfg.Model = cfg.Model.ImportPath() + "." + enum.GoType
- cfg.Models[enum.GQLType] = modelCfg
- }
- }
-
- build, err := cfg.bind()
- if err != nil {
- return errors.Wrap(err, "exec plan failed")
- }
-
- var buf *bytes.Buffer
- buf, err = templates.Run("generated.gotpl", build)
- if err != nil {
- return errors.Wrap(err, "exec codegen failed")
- }
-
- if err = write(cfg.Exec.Filename, buf.Bytes()); err != nil {
- return err
- }
-
- if err = cfg.validate(); err != nil {
- return errors.Wrap(err, "validation failed")
- }
-
- return nil
-}
-
-func (cfg *Config) normalize() error {
- if err := cfg.Model.normalize(); err != nil {
- return errors.Wrap(err, "model")
- }
-
- if err := cfg.Exec.normalize(); err != nil {
- return errors.Wrap(err, "exec")
- }
-
- builtins := TypeMap{
- "__Directive": {Model: "github.com/vektah/gqlgen/neelance/introspection.Directive"},
- "__Type": {Model: "github.com/vektah/gqlgen/neelance/introspection.Type"},
- "__Field": {Model: "github.com/vektah/gqlgen/neelance/introspection.Field"},
- "__EnumValue": {Model: "github.com/vektah/gqlgen/neelance/introspection.EnumValue"},
- "__InputValue": {Model: "github.com/vektah/gqlgen/neelance/introspection.InputValue"},
- "__Schema": {Model: "github.com/vektah/gqlgen/neelance/introspection.Schema"},
- "Int": {Model: "github.com/vektah/gqlgen/graphql.Int"},
- "Float": {Model: "github.com/vektah/gqlgen/graphql.Float"},
- "String": {Model: "github.com/vektah/gqlgen/graphql.String"},
- "Boolean": {Model: "github.com/vektah/gqlgen/graphql.Boolean"},
- "ID": {Model: "github.com/vektah/gqlgen/graphql.ID"},
- "Time": {Model: "github.com/vektah/gqlgen/graphql.Time"},
- "Map": {Model: "github.com/vektah/gqlgen/graphql.Map"},
- }
-
- if cfg.Models == nil {
- cfg.Models = TypeMap{}
- }
- for typeName, entry := range builtins {
- if !cfg.Models.Exists(typeName) {
- cfg.Models[typeName] = entry
- }
- }
-
- cfg.schema = schema.New()
- return cfg.schema.Parse(cfg.SchemaStr)
-}
-
-var invalidPackageNameChar = regexp.MustCompile(`[^\w]`)
-
-func sanitizePackageName(pkg string) string {
- return invalidPackageNameChar.ReplaceAllLiteralString(filepath.Base(pkg), "_")
-}
-
-func abs(path string) string {
- absPath, err := filepath.Abs(path)
- if err != nil {
- panic(err)
- }
- return filepath.ToSlash(absPath)
-}
-
-func gofmt(filename string, b []byte) ([]byte, error) {
- out, err := imports.Process(filename, b, nil)
- if err != nil {
- return b, errors.Wrap(err, "unable to gofmt")
- }
- return out, nil
-}
-
-func write(filename string, b []byte) error {
- err := os.MkdirAll(filepath.Dir(filename), 0755)
- if err != nil {
- return errors.Wrap(err, "failed to create directory")
- }
-
- formatted, err := gofmt(filename, b)
- if err != nil {
- fmt.Fprintf(os.Stderr, "gofmt failed: %s\n", err.Error())
- formatted = b
- }
-
- err = ioutil.WriteFile(filename, formatted, 0644)
- if err != nil {
- return errors.Wrapf(err, "failed to write %s", filename)
- }
-
- return nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/config.go b/vendor/github.com/vektah/gqlgen/codegen/config.go
deleted file mode 100644
index cd42ae6b..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/config.go
+++ /dev/null
@@ -1,184 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/build"
- "io/ioutil"
- "os"
- "path/filepath"
- "strings"
-
- "github.com/pkg/errors"
- "github.com/vektah/gqlgen/neelance/schema"
- "gopkg.in/yaml.v2"
-)
-
-var defaults = Config{
- SchemaFilename: "schema.graphql",
- Model: PackageConfig{Filename: "models_gen.go"},
- Exec: PackageConfig{Filename: "generated.go"},
-}
-
-var cfgFilenames = []string{".gqlgen.yml", "gqlgen.yml", "gqlgen.yaml"}
-
-// LoadDefaultConfig looks for a config file in the current directory, and all parent directories
-// walking up the tree. The closest config file will be returned.
-func LoadDefaultConfig() (*Config, error) {
- cfgFile, err := findCfg()
- if err != nil || cfgFile == "" {
- cpy := defaults
- return &cpy, err
- }
-
- err = os.Chdir(filepath.Dir(cfgFile))
- if err != nil {
- return nil, errors.Wrap(err, "unable to enter config dir")
- }
- return LoadConfig(cfgFile)
-}
-
-// LoadConfig reads the gqlgen.yml config file
-func LoadConfig(filename string) (*Config, error) {
- config := defaults
-
- b, err := ioutil.ReadFile(filename)
- if err != nil {
- return nil, errors.Wrap(err, "unable to read config")
- }
-
- if err := yaml.UnmarshalStrict(b, &config); err != nil {
- return nil, errors.Wrap(err, "unable to parse config")
- }
-
- return &config, nil
-}
-
-type Config struct {
- SchemaFilename string `yaml:"schema,omitempty"`
- SchemaStr string `yaml:"-"`
- Exec PackageConfig `yaml:"exec"`
- Model PackageConfig `yaml:"model"`
- Models TypeMap `yaml:"models,omitempty"`
-
- schema *schema.Schema `yaml:"-"`
-}
-
-type PackageConfig struct {
- Filename string `yaml:"filename,omitempty"`
- Package string `yaml:"package,omitempty"`
-}
-
-type TypeMapEntry struct {
- Model string `yaml:"model"`
- Fields map[string]TypeMapField `yaml:"fields,omitempty"`
-}
-
-type TypeMapField struct {
- Resolver bool `yaml:"resolver"`
-}
-
-func (c *PackageConfig) normalize() error {
- if c.Filename == "" {
- return errors.New("Filename is required")
- }
- c.Filename = abs(c.Filename)
- // If Package is not set, first attempt to load the package at the output dir. If that fails
- // fallback to just the base dir name of the output filename.
- if c.Package == "" {
- cwd, _ := os.Getwd()
- pkg, _ := build.Default.Import(c.ImportPath(), cwd, 0)
- if pkg.Name != "" {
- c.Package = pkg.Name
- } else {
- c.Package = filepath.Base(c.Dir())
- }
- }
- c.Package = sanitizePackageName(c.Package)
- return nil
-}
-
-func (c *PackageConfig) ImportPath() string {
- dir := filepath.ToSlash(c.Dir())
- for _, gopath := range filepath.SplitList(build.Default.GOPATH) {
- gopath = filepath.ToSlash(gopath) + "/src/"
- if len(gopath) > len(dir) {
- continue
- }
- if strings.EqualFold(gopath, dir[0:len(gopath)]) {
- dir = dir[len(gopath):]
- break
- }
- }
- return dir
-}
-
-func (c *PackageConfig) Dir() string {
- return filepath.ToSlash(filepath.Dir(c.Filename))
-}
-
-func (c *PackageConfig) Check() error {
- if strings.ContainsAny(c.Package, "./\\") {
- return fmt.Errorf("package should be the output package name only, do not include the output filename")
- }
- if c.Filename != "" && !strings.HasSuffix(c.Filename, ".go") {
- return fmt.Errorf("filename should be path to a go source file")
- }
- return nil
-}
-
-func (cfg *Config) Check() error {
- if err := cfg.Models.Check(); err != nil {
- return errors.Wrap(err, "config.models")
- }
- if err := cfg.Exec.Check(); err != nil {
- return errors.Wrap(err, "config.exec")
- }
- if err := cfg.Model.Check(); err != nil {
- return errors.Wrap(err, "config.model")
- }
- return nil
-}
-
-type TypeMap map[string]TypeMapEntry
-
-func (tm TypeMap) Exists(typeName string) bool {
- _, ok := tm[typeName]
- return ok
-}
-
-func (tm TypeMap) Check() error {
- for typeName, entry := range tm {
- if strings.LastIndex(entry.Model, ".") < strings.LastIndex(entry.Model, "/") {
- return fmt.Errorf("model %s: invalid type specifier \"%s\" - you need to specify a struct to map to", typeName, entry.Model)
- }
- }
- return nil
-}
-
-// findCfg searches for the config file in this directory and all parents up the tree
-// looking for the closest match
-func findCfg() (string, error) {
- dir, err := os.Getwd()
- if err != nil {
- return "", errors.Wrap(err, "unable to get working dir to findCfg")
- }
-
- cfg := findCfgInDir(dir)
-
- for cfg == "" && dir != filepath.Dir(dir) {
- dir = filepath.Dir(dir)
- cfg = findCfgInDir(dir)
- }
-
- return cfg, nil
-}
-
-func findCfgInDir(dir string) string {
- for _, cfgName := range cfgFilenames {
- path := filepath.Join(dir, cfgName)
- if _, err := os.Stat(path); err == nil {
- return path
- }
- }
- return ""
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/enum.go b/vendor/github.com/vektah/gqlgen/codegen/enum.go
deleted file mode 100644
index e62fd2b1..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/enum.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package codegen
-
-type Enum struct {
- *NamedType
-
- Values []EnumValue
-}
-
-type EnumValue struct {
- Name string
- Description string
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/enum_build.go b/vendor/github.com/vektah/gqlgen/codegen/enum_build.go
deleted file mode 100644
index f2e6f63c..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/enum_build.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package codegen
-
-import (
- "sort"
- "strings"
-
- "github.com/vektah/gqlgen/codegen/templates"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-func (cfg *Config) buildEnums(types NamedTypes) []Enum {
- var enums []Enum
-
- for _, typ := range cfg.schema.Types {
- namedType := types[typ.TypeName()]
- e, isEnum := typ.(*schema.Enum)
- if !isEnum || strings.HasPrefix(typ.TypeName(), "__") || namedType.IsUserDefined {
- continue
- }
-
- var values []EnumValue
- for _, v := range e.Values {
- values = append(values, EnumValue{v.Name, v.Desc})
- }
-
- enum := Enum{
- NamedType: namedType,
- Values: values,
- }
- enum.GoType = templates.ToCamel(enum.GQLType)
- enums = append(enums, enum)
- }
-
- sort.Slice(enums, func(i, j int) bool {
- return strings.Compare(enums[i].GQLType, enums[j].GQLType) == -1
- })
-
- return enums
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/import.go b/vendor/github.com/vektah/gqlgen/codegen/import.go
deleted file mode 100644
index b511e8f6..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/import.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package codegen
-
-import (
- "strconv"
-)
-
-type Import struct {
- Name string
- Path string
-
- alias string
-}
-
-type Imports struct {
- imports []*Import
- destDir string
-}
-
-func (i *Import) Write() string {
- return i.Alias() + " " + strconv.Quote(i.Path)
-}
-
-func (i *Import) Alias() string {
- if i.alias == "" {
- panic("alias called before imports are finalized")
- }
-
- return i.alias
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/import_build.go b/vendor/github.com/vektah/gqlgen/codegen/import_build.go
deleted file mode 100644
index f0877ed3..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/import_build.go
+++ /dev/null
@@ -1,116 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/build"
- "sort"
- "strconv"
- "strings"
-)
-
-// These imports are referenced by the generated code, and are assumed to have the
-// default alias. So lets make sure they get added first, and any later collisions get
-// renamed.
-var ambientImports = []string{
- "context",
- "fmt",
- "io",
- "strconv",
- "time",
- "sync",
- "github.com/vektah/gqlgen/neelance/introspection",
- "github.com/vektah/gqlgen/neelance/errors",
- "github.com/vektah/gqlgen/neelance/query",
- "github.com/vektah/gqlgen/neelance/schema",
- "github.com/vektah/gqlgen/neelance/validation",
- "github.com/vektah/gqlgen/graphql",
-}
-
-func buildImports(types NamedTypes, destDir string) *Imports {
- imports := Imports{
- destDir: destDir,
- }
-
- for _, ambient := range ambientImports {
- imports.add(ambient)
- }
-
- // Imports from top level user types
- for _, t := range types {
- t.Import = imports.add(t.Package)
- }
-
- return &imports
-}
-
-func (s *Imports) add(path string) *Import {
- if path == "" {
- return nil
- }
-
- if stringHasSuffixFold(s.destDir, path) {
- return nil
- }
-
- if existing := s.findByPath(path); existing != nil {
- return existing
- }
-
- pkg, err := build.Default.Import(path, s.destDir, 0)
- if err != nil {
- panic(err)
- }
-
- imp := &Import{
- Name: pkg.Name,
- Path: path,
- }
- s.imports = append(s.imports, imp)
-
- return imp
-}
-
-func stringHasSuffixFold(s, suffix string) bool {
- return len(s) >= len(suffix) && strings.EqualFold(s[len(s)-len(suffix):], suffix)
-}
-
-func (s Imports) finalize() []*Import {
- // ensure stable ordering by sorting
- sort.Slice(s.imports, func(i, j int) bool {
- return s.imports[i].Path > s.imports[j].Path
- })
-
- for _, imp := range s.imports {
- alias := imp.Name
-
- i := 1
- for s.findByAlias(alias) != nil {
- alias = imp.Name + strconv.Itoa(i)
- i++
- if i > 10 {
- panic(fmt.Errorf("too many collisions, last attempt was %s", alias))
- }
- }
- imp.alias = alias
- }
-
- return s.imports
-}
-
-func (s Imports) findByPath(importPath string) *Import {
- for _, imp := range s.imports {
- if imp.Path == importPath {
- return imp
- }
- }
- return nil
-}
-
-func (s Imports) findByAlias(alias string) *Import {
- for _, imp := range s.imports {
- if imp.alias == alias {
- return imp
- }
- }
- return nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/input_build.go b/vendor/github.com/vektah/gqlgen/codegen/input_build.go
deleted file mode 100644
index 98b25b8b..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/input_build.go
+++ /dev/null
@@ -1,86 +0,0 @@
-package codegen
-
-import (
- "go/types"
- "sort"
- "strings"
-
- "github.com/pkg/errors"
- "github.com/vektah/gqlgen/neelance/schema"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildInputs(namedTypes NamedTypes, prog *loader.Program, imports *Imports) (Objects, error) {
- var inputs Objects
-
- for _, typ := range cfg.schema.Types {
- switch typ := typ.(type) {
- case *schema.InputObject:
- input, err := buildInput(namedTypes, typ)
- if err != nil {
- return nil, err
- }
-
- def, err := findGoType(prog, input.Package, input.GoType)
- if err != nil {
- return nil, errors.Wrap(err, "cannot find type")
- }
- if def != nil {
- input.Marshaler = buildInputMarshaler(typ, def)
- bindErrs := bindObject(def.Type(), input, imports)
- if len(bindErrs) > 0 {
- return nil, bindErrs
- }
- }
-
- inputs = append(inputs, input)
- }
- }
-
- sort.Slice(inputs, func(i, j int) bool {
- return strings.Compare(inputs[i].GQLType, inputs[j].GQLType) == -1
- })
-
- return inputs, nil
-}
-
-func buildInput(types NamedTypes, typ *schema.InputObject) (*Object, error) {
- obj := &Object{NamedType: types[typ.TypeName()]}
-
- for _, field := range typ.Values {
- newField := Field{
- GQLName: field.Name.Name,
- Type: types.getType(field.Type),
- Object: obj,
- }
-
- if field.Default != nil {
- newField.Default = field.Default.Value(nil)
- }
-
- if !newField.Type.IsInput && !newField.Type.IsScalar {
- return nil, errors.Errorf("%s cannot be used as a field of %s. only input and scalar types are allowed", newField.GQLType, obj.GQLType)
- }
-
- obj.Fields = append(obj.Fields, newField)
-
- }
- return obj, nil
-}
-
-// if user has implemented an UnmarshalGQL method on the input type manually, use it
-// otherwise we will generate one.
-func buildInputMarshaler(typ *schema.InputObject, def types.Object) *Ref {
- switch def := def.(type) {
- case *types.TypeName:
- namedType := def.Type().(*types.Named)
- for i := 0; i < namedType.NumMethods(); i++ {
- method := namedType.Method(i)
- if method.Name() == "UnmarshalGQL" {
- return nil
- }
- }
- }
-
- return &Ref{GoType: typ.Name}
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/interface.go b/vendor/github.com/vektah/gqlgen/codegen/interface.go
deleted file mode 100644
index 2de0c88a..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/interface.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package codegen
-
-type Interface struct {
- *NamedType
-
- Implementors []InterfaceImplementor
-}
-
-type InterfaceImplementor struct {
- ValueReceiver bool
-
- *NamedType
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/interface_build.go b/vendor/github.com/vektah/gqlgen/codegen/interface_build.go
deleted file mode 100644
index cdf0f597..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/interface_build.go
+++ /dev/null
@@ -1,94 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/types"
- "os"
- "sort"
- "strings"
-
- "github.com/vektah/gqlgen/neelance/schema"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildInterfaces(types NamedTypes, prog *loader.Program) []*Interface {
- var interfaces []*Interface
- for _, typ := range cfg.schema.Types {
- switch typ := typ.(type) {
- case *schema.Union, *schema.Interface:
- interfaces = append(interfaces, cfg.buildInterface(types, typ, prog))
- default:
- continue
- }
- }
-
- sort.Slice(interfaces, func(i, j int) bool {
- return strings.Compare(interfaces[i].GQLType, interfaces[j].GQLType) == -1
- })
-
- return interfaces
-}
-
-func (cfg *Config) buildInterface(types NamedTypes, typ schema.NamedType, prog *loader.Program) *Interface {
- switch typ := typ.(type) {
-
- case *schema.Union:
- i := &Interface{NamedType: types[typ.TypeName()]}
-
- for _, implementor := range typ.PossibleTypes {
- t := types[implementor.TypeName()]
-
- i.Implementors = append(i.Implementors, InterfaceImplementor{
- NamedType: t,
- ValueReceiver: cfg.isValueReceiver(types[typ.Name], t, prog),
- })
- }
-
- return i
-
- case *schema.Interface:
- i := &Interface{NamedType: types[typ.TypeName()]}
-
- for _, implementor := range typ.PossibleTypes {
- t := types[implementor.TypeName()]
-
- i.Implementors = append(i.Implementors, InterfaceImplementor{
- NamedType: t,
- ValueReceiver: cfg.isValueReceiver(types[typ.Name], t, prog),
- })
- }
-
- return i
- default:
- panic(fmt.Errorf("unknown interface %#v", typ))
- }
-}
-
-func (cfg *Config) isValueReceiver(intf *NamedType, implementor *NamedType, prog *loader.Program) bool {
- interfaceType, err := findGoInterface(prog, intf.Package, intf.GoType)
- if interfaceType == nil || err != nil {
- return true
- }
-
- implementorType, err := findGoNamedType(prog, implementor.Package, implementor.GoType)
- if implementorType == nil || err != nil {
- return true
- }
-
- for i := 0; i < interfaceType.NumMethods(); i++ {
- intfMethod := interfaceType.Method(i)
-
- implMethod := findMethod(implementorType, intfMethod.Name())
- if implMethod == nil {
- fmt.Fprintf(os.Stderr, "missing method %s on %s\n", intfMethod.Name(), implementor.GoType)
- return false
- }
-
- sig := implMethod.Type().(*types.Signature)
- if _, isPtr := sig.Recv().Type().(*types.Pointer); isPtr {
- return false
- }
- }
-
- return true
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/model.go b/vendor/github.com/vektah/gqlgen/codegen/model.go
deleted file mode 100644
index 164a04d5..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/model.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package codegen
-
-type Model struct {
- *NamedType
-
- Fields []ModelField
-}
-
-type ModelField struct {
- *Type
- GQLName string
- GoVarName string
- GoFKName string
- GoFKType string
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/models_build.go b/vendor/github.com/vektah/gqlgen/codegen/models_build.go
deleted file mode 100644
index 211d4bd4..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/models_build.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package codegen
-
-import (
- "sort"
- "strings"
-
- "github.com/vektah/gqlgen/neelance/schema"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model, error) {
- var models []Model
-
- for _, typ := range cfg.schema.Types {
- var model Model
- switch typ := typ.(type) {
- case *schema.Object:
- obj, err := cfg.buildObject(types, typ)
- if err != nil {
- return nil, err
- }
- if obj.Root || obj.IsUserDefined {
- continue
- }
- model = cfg.obj2Model(obj)
- case *schema.InputObject:
- obj, err := buildInput(types, typ)
- if err != nil {
- return nil, err
- }
- if obj.IsUserDefined {
- continue
- }
- model = cfg.obj2Model(obj)
- case *schema.Interface, *schema.Union:
- intf := cfg.buildInterface(types, typ, prog)
- if intf.IsUserDefined {
- continue
- }
- model = int2Model(intf)
- default:
- continue
- }
-
- models = append(models, model)
- }
-
- sort.Slice(models, func(i, j int) bool {
- return strings.Compare(models[i].GQLType, models[j].GQLType) == -1
- })
-
- return models, nil
-}
-
-func (cfg *Config) obj2Model(obj *Object) Model {
- model := Model{
- NamedType: obj.NamedType,
- Fields: []ModelField{},
- }
-
- model.GoType = ucFirst(obj.GQLType)
- model.Marshaler = &Ref{GoType: obj.GoType}
-
- for i := range obj.Fields {
- field := &obj.Fields[i]
- mf := ModelField{Type: field.Type, GQLName: field.GQLName}
-
- mf.GoVarName = ucFirst(field.GQLName)
- if mf.IsScalar {
- if mf.GoVarName == "Id" {
- mf.GoVarName = "ID"
- }
- }
-
- model.Fields = append(model.Fields, mf)
- }
-
- return model
-}
-
-func int2Model(obj *Interface) Model {
- model := Model{
- NamedType: obj.NamedType,
- Fields: []ModelField{},
- }
-
- model.GoType = ucFirst(obj.GQLType)
- model.Marshaler = &Ref{GoType: obj.GoType}
-
- return model
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/object.go b/vendor/github.com/vektah/gqlgen/codegen/object.go
deleted file mode 100644
index 1c03c0ba..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/object.go
+++ /dev/null
@@ -1,206 +0,0 @@
-package codegen
-
-import (
- "bytes"
- "fmt"
- "strconv"
- "strings"
- "text/template"
- "unicode"
-)
-
-type Object struct {
- *NamedType
-
- Fields []Field
- Satisfies []string
- Root bool
- DisableConcurrency bool
- Stream bool
-}
-
-type Field struct {
- *Type
-
- GQLName string // The name of the field in graphql
- GoMethodName string // The name of the method in go, if any
- GoVarName string // The name of the var in go, if any
- Args []FieldArgument // A list of arguments to be passed to this field
- ForceResolver bool // Should be emit Resolver method
- NoErr bool // If this is bound to a go method, does that method have an error as the second argument
- Object *Object // A link back to the parent object
- Default interface{} // The default value
-}
-
-type FieldArgument struct {
- *Type
-
- GQLName string // The name of the argument in graphql
- GoVarName string // The name of the var in go
- Object *Object // A link back to the parent object
- Default interface{} // The default value
-}
-
-type Objects []*Object
-
-func (o *Object) Implementors() string {
- satisfiedBy := strconv.Quote(o.GQLType)
- for _, s := range o.Satisfies {
- satisfiedBy += ", " + strconv.Quote(s)
- }
- return "[]string{" + satisfiedBy + "}"
-}
-
-func (o *Object) HasResolvers() bool {
- for _, f := range o.Fields {
- if f.IsResolver() {
- return true
- }
- }
- return false
-}
-
-func (f *Field) IsResolver() bool {
- return f.ForceResolver || f.GoMethodName == "" && f.GoVarName == ""
-}
-
-func (f *Field) IsConcurrent() bool {
- return f.IsResolver() && !f.Object.DisableConcurrency
-}
-func (f *Field) ShortInvocation() string {
- if !f.IsResolver() {
- return ""
- }
- shortName := strings.ToUpper(f.GQLName[:1]) + f.GQLName[1:]
- res := fmt.Sprintf("%s().%s(ctx", f.Object.GQLType, shortName)
- if !f.Object.Root {
- res += fmt.Sprintf(", obj")
- }
- for _, arg := range f.Args {
- res += fmt.Sprintf(", %s", arg.GoVarName)
- }
- res += ")"
- return res
-}
-func (f *Field) ShortResolverDeclaration() string {
- if !f.IsResolver() {
- return ""
- }
- decl := strings.TrimPrefix(f.ResolverDeclaration(), f.Object.GQLType+"_")
- return strings.ToUpper(decl[:1]) + decl[1:]
-}
-
-func (f *Field) ResolverDeclaration() string {
- if !f.IsResolver() {
- return ""
- }
- res := fmt.Sprintf("%s_%s(ctx context.Context", f.Object.GQLType, f.GQLName)
-
- if !f.Object.Root {
- res += fmt.Sprintf(", obj *%s", f.Object.FullName())
- }
- for _, arg := range f.Args {
- res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
- }
-
- result := f.Signature()
- if f.Object.Stream {
- result = "<-chan " + result
- }
-
- res += fmt.Sprintf(") (%s, error)", result)
- return res
-}
-
-func (f *Field) CallArgs() string {
- var args []string
-
- if f.GoMethodName == "" {
- args = append(args, "ctx")
-
- if !f.Object.Root {
- args = append(args, "obj")
- }
- }
-
- for _, arg := range f.Args {
- args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
- }
-
- return strings.Join(args, ", ")
-}
-
-// should be in the template, but its recursive and has a bunch of args
-func (f *Field) WriteJson() string {
- return f.doWriteJson("res", f.Type.Modifiers, false, 1)
-}
-
-func (f *Field) doWriteJson(val string, remainingMods []string, isPtr bool, depth int) string {
- switch {
- case len(remainingMods) > 0 && remainingMods[0] == modPtr:
- return fmt.Sprintf("if %s == nil { return graphql.Null }\n%s", val, f.doWriteJson(val, remainingMods[1:], true, depth+1))
-
- case len(remainingMods) > 0 && remainingMods[0] == modList:
- if isPtr {
- val = "*" + val
- }
- var arr = "arr" + strconv.Itoa(depth)
- var index = "idx" + strconv.Itoa(depth)
-
- return tpl(`{{.arr}} := graphql.Array{}
- for {{.index}} := range {{.val}} {
- {{.arr}} = append({{.arr}}, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex({{.index}})
- defer rctx.Pop()
- {{ .next }}
- }())
- }
- return {{.arr}}`, map[string]interface{}{
- "val": val,
- "arr": arr,
- "index": index,
- "next": f.doWriteJson(val+"["+index+"]", remainingMods[1:], false, depth+1),
- })
-
- case f.IsScalar:
- if isPtr {
- val = "*" + val
- }
- return f.Marshal(val)
-
- default:
- if !isPtr {
- val = "&" + val
- }
- return fmt.Sprintf("return ec._%s(ctx, field.Selections, %s)", f.GQLType, val)
- }
-}
-
-func (os Objects) ByName(name string) *Object {
- for i, o := range os {
- if strings.EqualFold(o.GQLType, name) {
- return os[i]
- }
- }
- return nil
-}
-
-func tpl(tpl string, vars map[string]interface{}) string {
- b := &bytes.Buffer{}
- err := template.Must(template.New("inline").Parse(tpl)).Execute(b, vars)
- if err != nil {
- panic(err)
- }
- return b.String()
-}
-
-func ucFirst(s string) string {
- if s == "" {
- return ""
- }
-
- r := []rune(s)
- r[0] = unicode.ToUpper(r[0])
- return string(r)
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/object_build.go b/vendor/github.com/vektah/gqlgen/codegen/object_build.go
deleted file mode 100644
index 0ef40fef..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/object_build.go
+++ /dev/null
@@ -1,144 +0,0 @@
-package codegen
-
-import (
- "log"
- "sort"
- "strings"
-
- "github.com/pkg/errors"
- "github.com/vektah/gqlgen/neelance/schema"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildObjects(types NamedTypes, prog *loader.Program, imports *Imports) (Objects, error) {
- var objects Objects
-
- for _, typ := range cfg.schema.Types {
- switch typ := typ.(type) {
- case *schema.Object:
- obj, err := cfg.buildObject(types, typ)
- if err != nil {
- return nil, err
- }
-
- def, err := findGoType(prog, obj.Package, obj.GoType)
- if err != nil {
- return nil, err
- }
- if def != nil {
- for _, bindErr := range bindObject(def.Type(), obj, imports) {
- log.Println(bindErr.Error())
- log.Println(" Adding resolver method")
- }
- }
-
- objects = append(objects, obj)
- }
- }
-
- sort.Slice(objects, func(i, j int) bool {
- return strings.Compare(objects[i].GQLType, objects[j].GQLType) == -1
- })
-
- return objects, nil
-}
-
-var keywords = []string{
- "break",
- "default",
- "func",
- "interface",
- "select",
- "case",
- "defer",
- "go",
- "map",
- "struct",
- "chan",
- "else",
- "goto",
- "package",
- "switch",
- "const",
- "fallthrough",
- "if",
- "range",
- "type",
- "continue",
- "for",
- "import",
- "return",
- "var",
-}
-
-func sanitizeGoName(name string) string {
- for _, k := range keywords {
- if name == k {
- return name + "_"
- }
- }
- return name
-}
-
-func (cfg *Config) buildObject(types NamedTypes, typ *schema.Object) (*Object, error) {
- obj := &Object{NamedType: types[typ.TypeName()]}
- typeEntry, entryExists := cfg.Models[typ.TypeName()]
-
- for _, i := range typ.Interfaces {
- obj.Satisfies = append(obj.Satisfies, i.Name)
- }
-
- for _, field := range typ.Fields {
-
- var forceResolver bool
- if entryExists {
- if typeField, ok := typeEntry.Fields[field.Name]; ok {
- forceResolver = typeField.Resolver
- }
- }
-
- var args []FieldArgument
- for _, arg := range field.Args {
- newArg := FieldArgument{
- GQLName: arg.Name.Name,
- Type: types.getType(arg.Type),
- Object: obj,
- GoVarName: sanitizeGoName(arg.Name.Name),
- }
-
- if !newArg.Type.IsInput && !newArg.Type.IsScalar {
- return nil, errors.Errorf("%s cannot be used as argument of %s.%s. only input and scalar types are allowed", arg.Type, obj.GQLType, field.Name)
- }
-
- if arg.Default != nil {
- newArg.Default = arg.Default.Value(nil)
- newArg.StripPtr()
- }
- args = append(args, newArg)
- }
-
- obj.Fields = append(obj.Fields, Field{
- GQLName: field.Name,
- Type: types.getType(field.Type),
- Args: args,
- Object: obj,
- ForceResolver: forceResolver,
- })
- }
-
- for name, typ := range cfg.schema.EntryPoints {
- schemaObj := typ.(*schema.Object)
- if schemaObj.TypeName() != obj.GQLType {
- continue
- }
-
- obj.Root = true
- if name == "mutation" {
- obj.DisableConcurrency = true
- }
- if name == "subscription" {
- obj.Stream = true
- }
- }
- return obj, nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/args.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/args.gotpl
deleted file mode 100644
index f53aceec..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/args.gotpl
+++ /dev/null
@@ -1,30 +0,0 @@
- {{- if . }}args := map[string]interface{}{} {{end}}
- {{- range $i, $arg := . }}
- var arg{{$i}} {{$arg.Signature }}
- if tmp, ok := field.Args[{{$arg.GQLName|quote}}]; ok {
- var err error
- {{$arg.Unmarshal (print "arg" $i) "tmp" }}
- if err != nil {
- ec.Error(ctx, err)
- {{- if $arg.Object.Stream }}
- return nil
- {{- else }}
- return graphql.Null
- {{- end }}
- }
- } {{ if $arg.Default }} else {
- var tmp interface{} = {{ $arg.Default | dump }}
- var err error
- {{$arg.Unmarshal (print "arg" $i) "tmp" }}
- if err != nil {
- ec.Error(ctx, err)
- {{- if $arg.Object.Stream }}
- return nil
- {{- else }}
- return graphql.Null
- {{- end }}
- }
- }
- {{end }}
- args[{{$arg.GQLName|quote}}] = arg{{$i}}
- {{- end -}}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/data.go b/vendor/github.com/vektah/gqlgen/codegen/templates/data.go
deleted file mode 100644
index d6da4807..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/data.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package templates
-
-var data = map[string]string{
- "args.gotpl": "\t{{- if . }}args := map[string]interface{}{} {{end}}\n\t{{- range $i, $arg := . }}\n\t\tvar arg{{$i}} {{$arg.Signature }}\n\t\tif tmp, ok := field.Args[{{$arg.GQLName|quote}}]; ok {\n\t\t\tvar err error\n\t\t\t{{$arg.Unmarshal (print \"arg\" $i) \"tmp\" }}\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\t{{- if $arg.Object.Stream }}\n\t\t\t\t\treturn nil\n\t\t\t\t{{- else }}\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t{{- end }}\n\t\t\t}\n\t\t} {{ if $arg.Default }} else {\n\t\t\tvar tmp interface{} = {{ $arg.Default | dump }}\n\t\t\tvar err error\n\t\t\t{{$arg.Unmarshal (print \"arg\" $i) \"tmp\" }}\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\t{{- if $arg.Object.Stream }}\n\t\t\t\t\treturn nil\n\t\t\t\t{{- else }}\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t{{- end }}\n\t\t\t}\n\t\t}\n\t\t{{end }}\n\t\targs[{{$arg.GQLName|quote}}] = arg{{$i}}\n\t{{- end -}}\n",
- "field.gotpl": "{{ $field := . }}\n{{ $object := $field.Object }}\n\n{{- if $object.Stream }}\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {\n\t\t{{- template \"args.gotpl\" $field.Args }}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{Field: field})\n\t\tresults, err := ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})\n\t\tif err != nil {\n\t\t\tec.Error(ctx, err)\n\t\t\treturn nil\n\t\t}\n\t\treturn func() graphql.Marshaler {\n\t\t\tres, ok := <-results\n\t\t\tif !ok {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tvar out graphql.OrderedMap\n\t\t\tout.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())\n\t\t\treturn &out\n\t\t}\n\t}\n{{ else }}\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {\n\t\t{{- template \"args.gotpl\" $field.Args }}\n\n\t\t{{- if $field.IsConcurrent }}\n\t\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\t\tObject: {{$object.GQLType|quote}},\n\t\t\t\tArgs: {{if $field.Args }}args{{else}}nil{{end}},\n\t\t\t\tField: field,\n\t\t\t})\n\t\t\treturn graphql.Defer(func() (ret graphql.Marshaler) {\n\t\t\t\tdefer func() {\n\t\t\t\t\tif r := recover(); r != nil {\n\t\t\t\t\t\tuserErr := ec.Recover(ctx, r)\n\t\t\t\t\t\tec.Error(ctx, userErr)\n\t\t\t\t\t\tret = graphql.Null\n\t\t\t\t\t}\n\t\t\t\t}()\n\t\t{{ else }}\n\t\t\trctx := graphql.GetResolverContext(ctx)\n\t\t\trctx.Object = {{$object.GQLType|quote}}\n\t\t\trctx.Args = {{if $field.Args }}args{{else}}nil{{end}}\n\t\t\trctx.Field = field\n\t\t\trctx.PushField(field.Alias)\n\t\t\tdefer rctx.Pop()\n\t\t{{- end }}\n\n\t\t\t{{- if $field.IsResolver }}\n\t\t\t\tresTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) 
{\n\t\t\t\t\treturn ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\tec.Error(ctx, err)\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t}\n\t\t\t\tif resTmp == nil {\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t}\n\t\t\t\tres := resTmp.({{$field.Signature}})\n\t\t\t{{- else if $field.GoVarName }}\n\t\t\t\tres := obj.{{$field.GoVarName}}\n\t\t\t{{- else if $field.GoMethodName }}\n\t\t\t\t{{- if $field.NoErr }}\n\t\t\t\t\tres := {{$field.GoMethodName}}({{ $field.CallArgs }})\n\t\t\t\t{{- else }}\n\t\t\t\t\tres, err := {{$field.GoMethodName}}({{ $field.CallArgs }})\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tec.Error(ctx, err)\n\t\t\t\t\t\treturn graphql.Null\n\t\t\t\t\t}\n\t\t\t\t{{- end }}\n\t\t\t{{- end }}\n\t\t\t{{ $field.WriteJson }}\n\t\t{{- if $field.IsConcurrent }}\n\t\t\t})\n\t\t{{- end }}\n\t}\n{{ end }}\n",
- "generated.gotpl": "// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\n// MakeExecutableSchema creates an ExecutableSchema from the Resolvers interface.\nfunc MakeExecutableSchema(resolvers Resolvers) graphql.ExecutableSchema {\n\treturn &executableSchema{resolvers: resolvers}\n}\n\n// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.\nfunc NewExecutableSchema(resolvers ResolverRoot) graphql.ExecutableSchema {\n\treturn MakeExecutableSchema(shortMapper{r: resolvers})\n}\n\ntype Resolvers interface {\n{{- range $object := .Objects -}}\n\t{{ range $field := $object.Fields -}}\n\t\t{{ $field.ResolverDeclaration }}\n\t{{ end }}\n{{- end }}\n}\n\ntype ResolverRoot interface {\n{{- range $object := .Objects -}}\n\t{{ if $object.HasResolvers -}}\n\t\t{{$object.GQLType}}() {{$object.GQLType}}Resolver\n\t{{ end }}\n{{- end }}\n}\n\n{{- range $object := .Objects -}}\n\t{{ if $object.HasResolvers }}\n\t\ttype {{$object.GQLType}}Resolver interface {\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{ $field.ShortResolverDeclaration }}\n\t\t{{ end }}\n\t\t}\n\t{{- end }}\n{{- end }}\n\ntype shortMapper struct {\n\tr ResolverRoot\n}\n\n{{- range $object := .Objects -}}\n\t{{ range $field := $object.Fields -}}\n\t\t{{- if $field.IsResolver }}\n\t\t\tfunc (s shortMapper) {{ $field.ResolverDeclaration }} {\n\t\t\t\treturn s.r.{{$field.ShortInvocation}}\n\t\t\t}\n\t\t{{- end }}\n\t{{ end }}\n{{- end }}\n\ntype executableSchema struct {\n\tresolvers Resolvers\n}\n\nfunc (e *executableSchema) Schema() *schema.Schema {\n\treturn parsedSchema\n}\n\nfunc (e *executableSchema) Query(ctx context.Context, op *query.Operation) *graphql.Response {\n\t{{- if .QueryRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := 
ec._{{.QueryRoot.GQLType}}(ctx, op.Selections)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"queries are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {\n\t{{- if .MutationRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.MutationRoot.GQLType}}(ctx, op.Selections)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"mutations are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {\n\t{{- if .SubscriptionRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tnext := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.Selections)\n\t\tif ec.Errors != nil {\n\t\t\treturn graphql.OneShot(&graphql.Response{Data: []byte(\"null\"), Errors: ec.Errors})\n\t\t}\n\n\t\tvar buf bytes.Buffer\n\t\treturn func() *graphql.Response {\n\t\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\t\tbuf.Reset()\n\t\t\t\tdata := next()\n\n\t\t\t\tif data == nil {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\tdata.MarshalGQL(&buf)\n\t\t\t\treturn buf.Bytes()\n\t\t\t})\n\n\t\t\treturn &graphql.Response{\n\t\t\t\tData: buf,\n\t\t\t\tErrors: ec.Errors,\n\t\t\t}\n\t\t}\n\t{{- else }}\n\t\treturn graphql.OneShot(graphql.ErrorResponse(ctx, \"subscriptions are not supported\"))\n\t{{- end }}\n}\n\ntype executionContext struct {\n\t*graphql.RequestContext\n\n\tresolvers Resolvers\n}\n\n{{- range 
$object := .Objects }}\n\t{{ template \"object.gotpl\" $object }}\n\n\t{{- range $field := $object.Fields }}\n\t\t{{ template \"field.gotpl\" $field }}\n\t{{ end }}\n{{- end}}\n\n{{- range $interface := .Interfaces }}\n\t{{ template \"interface.gotpl\" $interface }}\n{{- end }}\n\n{{- range $input := .Inputs }}\n\t{{ template \"input.gotpl\" $input }}\n{{- end }}\n\nfunc (ec *executionContext) introspectSchema() *introspection.Schema {\n\treturn introspection.WrapSchema(parsedSchema)\n}\n\nfunc (ec *executionContext) introspectType(name string) *introspection.Type {\n\tt := parsedSchema.Resolve(name)\n\tif t == nil {\n\t\treturn nil\n\t}\n\treturn introspection.WrapType(t)\n}\n\nvar parsedSchema = schema.MustParse({{.SchemaRaw|rawQuote}})\n",
- "input.gotpl": "\t{{- if .IsMarshaled }}\n\tfunc Unmarshal{{ .GQLType }}(v interface{}) ({{.FullName}}, error) {\n\t\tvar it {{.FullName}}\n\t\tvar asMap = v.(map[string]interface{})\n\t\t{{ range $field := .Fields}}\n\t\t\t{{- if $field.Default}}\n\t\t\t\tif _, present := asMap[{{$field.GQLName|quote}}] ; !present {\n\t\t\t\t\tasMap[{{$field.GQLName|quote}}] = {{ $field.Default | dump }}\n\t\t\t\t}\n\t\t\t{{- end}}\n\t\t{{- end }}\n\n\t\tfor k, v := range asMap {\n\t\t\tswitch k {\n\t\t\t{{- range $field := .Fields }}\n\t\t\tcase {{$field.GQLName|quote}}:\n\t\t\t\tvar err error\n\t\t\t\t{{ $field.Unmarshal (print \"it.\" $field.GoVarName) \"v\" }}\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn it, err\n\t\t\t\t}\n\t\t\t{{- end }}\n\t\t\t}\n\t\t}\n\n\t\treturn it, nil\n\t}\n\t{{- end }}\n",
- "interface.gotpl": "{{- $interface := . }}\n\nfunc (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel []query.Selection, obj *{{$interface.FullName}}) graphql.Marshaler {\n\tswitch obj := (*obj).(type) {\n\tcase nil:\n\t\treturn graphql.Null\n\t{{- range $implementor := $interface.Implementors }}\n\t\t{{- if $implementor.ValueReceiver }}\n\t\t\tcase {{$implementor.FullName}}:\n\t\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, &obj)\n\t\t{{- end}}\n\t\tcase *{{$implementor.FullName}}:\n\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, obj)\n\t{{- end }}\n\tdefault:\n\t\tpanic(fmt.Errorf(\"unexpected type %T\", obj))\n\t}\n}\n",
- "models.gotpl": "// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\n{{ range $model := .Models }}\n\t{{- if .IsInterface }}\n\t\ttype {{.GoType}} interface {}\n\t{{- else }}\n\t\ttype {{.GoType}} struct {\n\t\t\t{{- range $field := .Fields }}\n\t\t\t\t{{- if $field.GoVarName }}\n\t\t\t\t\t{{ $field.GoVarName }} {{$field.Signature}} `json:\"{{$field.GQLName}}\"`\n\t\t\t\t{{- else }}\n\t\t\t\t\t{{ $field.GoFKName }} {{$field.GoFKType}}\n\t\t\t\t{{- end }}\n\t\t\t{{- end }}\n\t\t}\n\t{{- end }}\n{{- end}}\n\n{{ range $enum := .Enums }}\n\ttype {{.GoType}} string\n\tconst (\n\t{{ range $value := .Values -}}\n\t\t{{with .Description}} {{.|prefixLines \"// \"}} {{end}}\n\t\t{{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}\n\t{{- end }}\n\t)\n\n\tfunc (e {{.GoType}}) IsValid() bool {\n\t\tswitch e {\n\t\tcase {{ range $index, $element := .Values}}{{if $index}},{{end}}{{ $enum.GoType }}{{ $element.Name|toCamel }}{{end}}:\n\t\t\treturn true\n\t\t}\n\t\treturn false\n\t}\n\n\tfunc (e {{.GoType}}) String() string {\n\t\treturn string(e)\n\t}\n\n\tfunc (e *{{.GoType}}) UnmarshalGQL(v interface{}) error {\n\t\tstr, ok := v.(string)\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"enums must be strings\")\n\t\t}\n\n\t\t*e = {{.GoType}}(str)\n\t\tif !e.IsValid() {\n\t\t\treturn fmt.Errorf(\"%s is not a valid {{.GQLType}}\", str)\n\t\t}\n\t\treturn nil\n\t}\n\n\tfunc (e {{.GoType}}) MarshalGQL(w io.Writer) {\n\t\tfmt.Fprint(w, strconv.Quote(e.String()))\n\t}\n\n{{- end }}\n",
- "object.gotpl": "{{ $object := . }}\n\nvar {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}\n\n// nolint: gocyclo, errcheck, gas, goconst\n{{- if .Stream }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection) func() graphql.Marshaler {\n\tfields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)\n\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\tObject: {{$object.GQLType|quote}},\n\t})\n\tif len(fields) != 1 {\n\t\tec.Errorf(ctx, \"must subscribe to exactly one stream\")\n\t\treturn nil\n\t}\n\n\tswitch fields[0].Name {\n\t{{- range $field := $object.Fields }}\n\tcase \"{{$field.GQLName}}\":\n\t\treturn ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, fields[0])\n\t{{- end }}\n\tdefault:\n\t\tpanic(\"unknown field \" + strconv.Quote(fields[0].Name))\n\t}\n}\n{{- else }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {\n\tfields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)\n\t{{if $object.Root}}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\tObject: {{$object.GQLType|quote}},\n\t\t})\n\t{{end}}\n\tout := graphql.NewOrderedMap(len(fields))\n\tfor i, field := range fields {\n\t\tout.Keys[i] = field.Alias\n\n\t\tswitch field.Name {\n\t\tcase \"__typename\":\n\t\t\tout.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})\n\t\t{{- range $field := $object.Fields }}\n\t\tcase \"{{$field.GQLName}}\":\n\t\t\tout.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})\n\t\t{{- end }}\n\t\tdefault:\n\t\t\tpanic(\"unknown field \" + strconv.Quote(field.Name))\n\t\t}\n\t}\n\n\treturn out\n}\n{{- end }}\n",
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/field.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/field.gotpl
deleted file mode 100644
index 4279ad8e..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/field.gotpl
+++ /dev/null
@@ -1,80 +0,0 @@
-{{ $field := . }}
-{{ $object := $field.Object }}
-
-{{- if $object.Stream }}
- func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {
- {{- template "args.gotpl" $field.Args }}
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{Field: field})
- results, err := ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})
- if err != nil {
- ec.Error(ctx, err)
- return nil
- }
- return func() graphql.Marshaler {
- res, ok := <-results
- if !ok {
- return nil
- }
- var out graphql.OrderedMap
- out.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())
- return &out
- }
- }
-{{ else }}
- func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {
- {{- template "args.gotpl" $field.Args }}
-
- {{- if $field.IsConcurrent }}
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Object: {{$object.GQLType|quote}},
- Args: {{if $field.Args }}args{{else}}nil{{end}},
- Field: field,
- })
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
- {{ else }}
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = {{$object.GQLType|quote}}
- rctx.Args = {{if $field.Args }}args{{else}}nil{{end}}
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- {{- end }}
-
- {{- if $field.IsResolver }}
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
- }
- res := resTmp.({{$field.Signature}})
- {{- else if $field.GoVarName }}
- res := obj.{{$field.GoVarName}}
- {{- else if $field.GoMethodName }}
- {{- if $field.NoErr }}
- res := {{$field.GoMethodName}}({{ $field.CallArgs }})
- {{- else }}
- res, err := {{$field.GoMethodName}}({{ $field.CallArgs }})
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- {{- end }}
- {{- end }}
- {{ $field.WriteJson }}
- {{- if $field.IsConcurrent }}
- })
- {{- end }}
- }
-{{ end }}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/generated.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/generated.gotpl
deleted file mode 100644
index cc1dc459..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/generated.gotpl
+++ /dev/null
@@ -1,175 +0,0 @@
-// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.
-
-package {{ .PackageName }}
-
-import (
-{{- range $import := .Imports }}
- {{- $import.Write }}
-{{ end }}
-)
-
-// MakeExecutableSchema creates an ExecutableSchema from the Resolvers interface.
-func MakeExecutableSchema(resolvers Resolvers) graphql.ExecutableSchema {
- return &executableSchema{resolvers: resolvers}
-}
-
-// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
-func NewExecutableSchema(resolvers ResolverRoot) graphql.ExecutableSchema {
- return MakeExecutableSchema(shortMapper{r: resolvers})
-}
-
-type Resolvers interface {
-{{- range $object := .Objects -}}
- {{ range $field := $object.Fields -}}
- {{ $field.ResolverDeclaration }}
- {{ end }}
-{{- end }}
-}
-
-type ResolverRoot interface {
-{{- range $object := .Objects -}}
- {{ if $object.HasResolvers -}}
- {{$object.GQLType}}() {{$object.GQLType}}Resolver
- {{ end }}
-{{- end }}
-}
-
-{{- range $object := .Objects -}}
- {{ if $object.HasResolvers }}
- type {{$object.GQLType}}Resolver interface {
- {{ range $field := $object.Fields -}}
- {{ $field.ShortResolverDeclaration }}
- {{ end }}
- }
- {{- end }}
-{{- end }}
-
-type shortMapper struct {
- r ResolverRoot
-}
-
-{{- range $object := .Objects -}}
- {{ range $field := $object.Fields -}}
- {{- if $field.IsResolver }}
- func (s shortMapper) {{ $field.ResolverDeclaration }} {
- return s.r.{{$field.ShortInvocation}}
- }
- {{- end }}
- {{ end }}
-{{- end }}
-
-type executableSchema struct {
- resolvers Resolvers
-}
-
-func (e *executableSchema) Schema() *schema.Schema {
- return parsedSchema
-}
-
-func (e *executableSchema) Query(ctx context.Context, op *query.Operation) *graphql.Response {
- {{- if .QueryRoot }}
- ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
-
- buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- data := ec._{{.QueryRoot.GQLType}}(ctx, op.Selections)
- var buf bytes.Buffer
- data.MarshalGQL(&buf)
- return buf.Bytes()
- })
-
- return &graphql.Response{
- Data: buf,
- Errors: ec.Errors,
- }
- {{- else }}
- return graphql.ErrorResponse(ctx, "queries are not supported")
- {{- end }}
-}
-
-func (e *executableSchema) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {
- {{- if .MutationRoot }}
- ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
-
- buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- data := ec._{{.MutationRoot.GQLType}}(ctx, op.Selections)
- var buf bytes.Buffer
- data.MarshalGQL(&buf)
- return buf.Bytes()
- })
-
- return &graphql.Response{
- Data: buf,
- Errors: ec.Errors,
- }
- {{- else }}
- return graphql.ErrorResponse(ctx, "mutations are not supported")
- {{- end }}
-}
-
-func (e *executableSchema) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {
- {{- if .SubscriptionRoot }}
- ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
-
- next := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.Selections)
- if ec.Errors != nil {
- return graphql.OneShot(&graphql.Response{Data: []byte("null"), Errors: ec.Errors})
- }
-
- var buf bytes.Buffer
- return func() *graphql.Response {
- buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- buf.Reset()
- data := next()
-
- if data == nil {
- return nil
- }
- data.MarshalGQL(&buf)
- return buf.Bytes()
- })
-
- return &graphql.Response{
- Data: buf,
- Errors: ec.Errors,
- }
- }
- {{- else }}
- return graphql.OneShot(graphql.ErrorResponse(ctx, "subscriptions are not supported"))
- {{- end }}
-}
-
-type executionContext struct {
- *graphql.RequestContext
-
- resolvers Resolvers
-}
-
-{{- range $object := .Objects }}
- {{ template "object.gotpl" $object }}
-
- {{- range $field := $object.Fields }}
- {{ template "field.gotpl" $field }}
- {{ end }}
-{{- end}}
-
-{{- range $interface := .Interfaces }}
- {{ template "interface.gotpl" $interface }}
-{{- end }}
-
-{{- range $input := .Inputs }}
- {{ template "input.gotpl" $input }}
-{{- end }}
-
-func (ec *executionContext) introspectSchema() *introspection.Schema {
- return introspection.WrapSchema(parsedSchema)
-}
-
-func (ec *executionContext) introspectType(name string) *introspection.Type {
- t := parsedSchema.Resolve(name)
- if t == nil {
- return nil
- }
- return introspection.WrapType(t)
-}
-
-var parsedSchema = schema.MustParse({{.SchemaRaw|rawQuote}})
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/input.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/input.gotpl
deleted file mode 100644
index 6073daf4..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/input.gotpl
+++ /dev/null
@@ -1,28 +0,0 @@
- {{- if .IsMarshaled }}
- func Unmarshal{{ .GQLType }}(v interface{}) ({{.FullName}}, error) {
- var it {{.FullName}}
- var asMap = v.(map[string]interface{})
- {{ range $field := .Fields}}
- {{- if $field.Default}}
- if _, present := asMap[{{$field.GQLName|quote}}] ; !present {
- asMap[{{$field.GQLName|quote}}] = {{ $field.Default | dump }}
- }
- {{- end}}
- {{- end }}
-
- for k, v := range asMap {
- switch k {
- {{- range $field := .Fields }}
- case {{$field.GQLName|quote}}:
- var err error
- {{ $field.Unmarshal (print "it." $field.GoVarName) "v" }}
- if err != nil {
- return it, err
- }
- {{- end }}
- }
- }
-
- return it, nil
- }
- {{- end }}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/interface.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/interface.gotpl
deleted file mode 100644
index 817d0abe..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/interface.gotpl
+++ /dev/null
@@ -1,18 +0,0 @@
-{{- $interface := . }}
-
-func (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel []query.Selection, obj *{{$interface.FullName}}) graphql.Marshaler {
- switch obj := (*obj).(type) {
- case nil:
- return graphql.Null
- {{- range $implementor := $interface.Implementors }}
- {{- if $implementor.ValueReceiver }}
- case {{$implementor.FullName}}:
- return ec._{{$implementor.GQLType}}(ctx, sel, &obj)
- {{- end}}
- case *{{$implementor.FullName}}:
- return ec._{{$implementor.GQLType}}(ctx, sel, obj)
- {{- end }}
- default:
- panic(fmt.Errorf("unexpected type %T", obj))
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/models.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/models.gotpl
deleted file mode 100644
index e66266a5..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/models.gotpl
+++ /dev/null
@@ -1,65 +0,0 @@
-// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.
-
-package {{ .PackageName }}
-
-import (
-{{- range $import := .Imports }}
- {{- $import.Write }}
-{{ end }}
-)
-
-{{ range $model := .Models }}
- {{- if .IsInterface }}
- type {{.GoType}} interface {}
- {{- else }}
- type {{.GoType}} struct {
- {{- range $field := .Fields }}
- {{- if $field.GoVarName }}
- {{ $field.GoVarName }} {{$field.Signature}} `json:"{{$field.GQLName}}"`
- {{- else }}
- {{ $field.GoFKName }} {{$field.GoFKType}}
- {{- end }}
- {{- end }}
- }
- {{- end }}
-{{- end}}
-
-{{ range $enum := .Enums }}
- type {{.GoType}} string
- const (
- {{ range $value := .Values -}}
- {{with .Description}} {{.|prefixLines "// "}} {{end}}
- {{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}
- {{- end }}
- )
-
- func (e {{.GoType}}) IsValid() bool {
- switch e {
- case {{ range $index, $element := .Values}}{{if $index}},{{end}}{{ $enum.GoType }}{{ $element.Name|toCamel }}{{end}}:
- return true
- }
- return false
- }
-
- func (e {{.GoType}}) String() string {
- return string(e)
- }
-
- func (e *{{.GoType}}) UnmarshalGQL(v interface{}) error {
- str, ok := v.(string)
- if !ok {
- return fmt.Errorf("enums must be strings")
- }
-
- *e = {{.GoType}}(str)
- if !e.IsValid() {
- return fmt.Errorf("%s is not a valid {{.GQLType}}", str)
- }
- return nil
- }
-
- func (e {{.GoType}}) MarshalGQL(w io.Writer) {
- fmt.Fprint(w, strconv.Quote(e.String()))
- }
-
-{{- end }}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/object.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/object.gotpl
deleted file mode 100644
index b531d5fe..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/object.gotpl
+++ /dev/null
@@ -1,52 +0,0 @@
-{{ $object := . }}
-
-var {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}
-
-// nolint: gocyclo, errcheck, gas, goconst
-{{- if .Stream }}
-func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection) func() graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Object: {{$object.GQLType|quote}},
- })
- if len(fields) != 1 {
- ec.Errorf(ctx, "must subscribe to exactly one stream")
- return nil
- }
-
- switch fields[0].Name {
- {{- range $field := $object.Fields }}
- case "{{$field.GQLName}}":
- return ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, fields[0])
- {{- end }}
- default:
- panic("unknown field " + strconv.Quote(fields[0].Name))
- }
-}
-{{- else }}
-func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)
- {{if $object.Root}}
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Object: {{$object.GQLType|quote}},
- })
- {{end}}
- out := graphql.NewOrderedMap(len(fields))
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})
- {{- range $field := $object.Fields }}
- case "{{$field.GQLName}}":
- out.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})
- {{- end }}
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- return out
-}
-{{- end }}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/templates.go b/vendor/github.com/vektah/gqlgen/codegen/templates/templates.go
deleted file mode 100644
index 3d29b403..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/templates.go
+++ /dev/null
@@ -1,139 +0,0 @@
-//go:generate go run ./inliner/inliner.go
-
-package templates
-
-import (
- "bytes"
- "fmt"
- "sort"
- "strconv"
- "strings"
- "text/template"
- "unicode"
-)
-
-func Run(name string, tpldata interface{}) (*bytes.Buffer, error) {
- t := template.New("").Funcs(template.FuncMap{
- "ucFirst": ucFirst,
- "lcFirst": lcFirst,
- "quote": strconv.Quote,
- "rawQuote": rawQuote,
- "toCamel": ToCamel,
- "dump": dump,
- "prefixLines": prefixLines,
- })
-
- for filename, data := range data {
- _, err := t.New(filename).Parse(data)
- if err != nil {
- panic(err)
- }
- }
-
- buf := &bytes.Buffer{}
- err := t.Lookup(name).Execute(buf, tpldata)
- if err != nil {
- return nil, err
- }
-
- return buf, nil
-}
-
-func ucFirst(s string) string {
- if s == "" {
- return ""
- }
- r := []rune(s)
- r[0] = unicode.ToUpper(r[0])
- return string(r)
-}
-
-func lcFirst(s string) string {
- if s == "" {
- return ""
- }
-
- r := []rune(s)
- r[0] = unicode.ToLower(r[0])
- return string(r)
-}
-
-func isDelimiter(c rune) bool {
- return c == '-' || c == '_' || unicode.IsSpace(c)
-}
-
-func ToCamel(s string) string {
- buffer := make([]rune, 0, len(s))
- upper := true
- lastWasUpper := false
-
- for _, c := range s {
- if isDelimiter(c) {
- upper = true
- continue
- }
- if !lastWasUpper && unicode.IsUpper(c) {
- upper = true
- }
-
- if upper {
- buffer = append(buffer, unicode.ToUpper(c))
- } else {
- buffer = append(buffer, unicode.ToLower(c))
- }
- upper = false
- lastWasUpper = unicode.IsUpper(c)
- }
-
- return string(buffer)
-}
-
-func rawQuote(s string) string {
- return "`" + strings.Replace(s, "`", "`+\"`\"+`", -1) + "`"
-}
-
-func dump(val interface{}) string {
- switch val := val.(type) {
- case int:
- return strconv.Itoa(val)
- case float64:
- return fmt.Sprintf("%f", val)
- case string:
- return strconv.Quote(val)
- case bool:
- return strconv.FormatBool(val)
- case nil:
- return "nil"
- case []interface{}:
- var parts []string
- for _, part := range val {
- parts = append(parts, dump(part))
- }
- return "[]interface{}{" + strings.Join(parts, ",") + "}"
- case map[string]interface{}:
- buf := bytes.Buffer{}
- buf.WriteString("map[string]interface{}{")
- var keys []string
- for key := range val {
- keys = append(keys, key)
- }
- sort.Strings(keys)
-
- for _, key := range keys {
- data := val[key]
-
- buf.WriteString(strconv.Quote(key))
- buf.WriteString(":")
- buf.WriteString(dump(data))
- buf.WriteString(",")
- }
- buf.WriteString("}")
- return buf.String()
- default:
- panic(fmt.Errorf("unsupported type %T", val))
- }
-}
-
-func prefixLines(prefix, s string) string {
- return prefix + strings.Replace(s, "\n", "\n"+prefix, -1)
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/type.go b/vendor/github.com/vektah/gqlgen/codegen/type.go
deleted file mode 100644
index 7af24b3c..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/type.go
+++ /dev/null
@@ -1,162 +0,0 @@
-package codegen
-
-import (
- "strconv"
- "strings"
-)
-
-type NamedTypes map[string]*NamedType
-
-type NamedType struct {
- Ref
- IsScalar bool
- IsInterface bool
- IsInput bool
- GQLType string // Name of the graphql type
- Marshaler *Ref // If this type has an external marshaler this will be set
-}
-
-type Ref struct {
- GoType string // Name of the go type
- Package string // the package the go type lives in
- Import *Import // the resolved import with alias
- IsUserDefined bool // does the type exist in the typemap
-}
-
-type Type struct {
- *NamedType
-
- Modifiers []string
- CastType *Ref // the type to cast to when unmarshalling
-}
-
-const (
- modList = "[]"
- modPtr = "*"
-)
-
-func (t Ref) FullName() string {
- return t.PkgDot() + t.GoType
-}
-
-func (t Ref) PkgDot() string {
- if t.Import == nil || t.Import.Alias() == "" {
- return ""
- }
- return t.Import.Alias() + "."
-}
-
-func (t Type) Signature() string {
- return strings.Join(t.Modifiers, "") + t.FullName()
-}
-
-func (t Type) FullSignature() string {
- pkg := ""
- if t.Package != "" {
- pkg = t.Package + "."
- }
-
- return strings.Join(t.Modifiers, "") + pkg + t.GoType
-}
-
-func (t Type) IsPtr() bool {
- return len(t.Modifiers) > 0 && t.Modifiers[0] == modPtr
-}
-
-func (t *Type) StripPtr() {
- if !t.IsPtr() {
- return
- }
- t.Modifiers = t.Modifiers[0 : len(t.Modifiers)-1]
-}
-
-func (t Type) IsSlice() bool {
- return len(t.Modifiers) > 0 && t.Modifiers[0] == modList ||
- len(t.Modifiers) > 1 && t.Modifiers[0] == modPtr && t.Modifiers[1] == modList
-}
-
-func (t NamedType) IsMarshaled() bool {
- return t.Marshaler != nil
-}
-
-func (t Type) Unmarshal(result, raw string) string {
- return t.unmarshal(result, raw, t.Modifiers, 1)
-}
-
-func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) string {
- switch {
- case len(remainingMods) > 0 && remainingMods[0] == modPtr:
- ptr := "ptr" + strconv.Itoa(depth)
- return tpl(`var {{.ptr}} {{.mods}}{{.t.FullName}}
- if {{.raw}} != nil {
- {{.next}}
- {{.result}} = &{{.ptr -}}
- }
- `, map[string]interface{}{
- "ptr": ptr,
- "t": t,
- "raw": raw,
- "result": result,
- "mods": strings.Join(remainingMods[1:], ""),
- "next": t.unmarshal(ptr, raw, remainingMods[1:], depth+1),
- })
-
- case len(remainingMods) > 0 && remainingMods[0] == modList:
- var rawIf = "rawIf" + strconv.Itoa(depth)
- var index = "idx" + strconv.Itoa(depth)
-
- return tpl(`var {{.rawSlice}} []interface{}
- if {{.raw}} != nil {
- if tmp1, ok := {{.raw}}.([]interface{}); ok {
- {{.rawSlice}} = tmp1
- }
- }
- {{.result}} = make({{.type}}, len({{.rawSlice}}))
- for {{.index}} := range {{.rawSlice}} {
- {{ .next -}}
- }`, map[string]interface{}{
- "raw": raw,
- "rawSlice": rawIf,
- "index": index,
- "result": result,
- "type": strings.Join(remainingMods, "") + t.NamedType.FullName(),
- "next": t.unmarshal(result+"["+index+"]", rawIf+"["+index+"]", remainingMods[1:], depth+1),
- })
- }
-
- realResult := result
- if t.CastType != nil {
- result = "castTmp"
- }
-
- return tpl(`{{- if .t.CastType }}
- var castTmp {{.t.FullName}}
- {{ end }}
- {{- if eq .t.GoType "map[string]interface{}" }}
- {{- .result }} = {{.raw}}.(map[string]interface{})
- {{- else if .t.Marshaler }}
- {{- .result }}, err = {{ .t.Marshaler.PkgDot }}Unmarshal{{.t.Marshaler.GoType}}({{.raw}})
- {{- else -}}
- err = (&{{.result}}).UnmarshalGQL({{.raw}})
- {{- end }}
- {{- if .t.CastType }}
- {{ .realResult }} = {{.t.CastType.FullName}}(castTmp)
- {{- end }}`, map[string]interface{}{
- "realResult": realResult,
- "result": result,
- "raw": raw,
- "t": t,
- })
-}
-
-func (t Type) Marshal(val string) string {
- if t.CastType != nil {
- val = t.GoType + "(" + val + ")"
- }
-
- if t.Marshaler != nil {
- return "return " + t.Marshaler.PkgDot() + "Marshal" + t.Marshaler.GoType + "(" + val + ")"
- }
-
- return "return " + val
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/type_build.go b/vendor/github.com/vektah/gqlgen/codegen/type_build.go
deleted file mode 100644
index ba2874b0..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/type_build.go
+++ /dev/null
@@ -1,112 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/types"
- "strings"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/schema"
- "golang.org/x/tools/go/loader"
-)
-
-// namedTypeFromSchema objects for every graphql type, including scalars. There should only be one instance of Type for each thing
-func (cfg *Config) buildNamedTypes() NamedTypes {
- types := map[string]*NamedType{}
- for _, schemaType := range cfg.schema.Types {
- t := namedTypeFromSchema(schemaType)
-
- if userEntry, ok := cfg.Models[t.GQLType]; ok && userEntry.Model != "" {
- t.IsUserDefined = true
- t.Package, t.GoType = pkgAndType(userEntry.Model)
- } else if t.IsScalar {
- t.Package = "github.com/vektah/gqlgen/graphql"
- t.GoType = "String"
- }
-
- types[t.GQLType] = t
- }
- return types
-}
-
-func (cfg *Config) bindTypes(imports *Imports, namedTypes NamedTypes, destDir string, prog *loader.Program) {
- for _, t := range namedTypes {
- if t.Package == "" {
- continue
- }
-
- def, _ := findGoType(prog, t.Package, "Marshal"+t.GoType)
- switch def := def.(type) {
- case *types.Func:
- sig := def.Type().(*types.Signature)
- cpy := t.Ref
- t.Marshaler = &cpy
-
- t.Package, t.GoType = pkgAndType(sig.Params().At(0).Type().String())
- t.Import = imports.add(t.Package)
- }
- }
-}
-
-// namedTypeFromSchema objects for every graphql type, including primitives.
-// don't recurse into object fields or interfaces yet, lets make sure we have collected everything first.
-func namedTypeFromSchema(schemaType schema.NamedType) *NamedType {
- switch val := schemaType.(type) {
- case *schema.Scalar, *schema.Enum:
- return &NamedType{GQLType: val.TypeName(), IsScalar: true}
- case *schema.Interface, *schema.Union:
- return &NamedType{GQLType: val.TypeName(), IsInterface: true}
- case *schema.InputObject:
- return &NamedType{GQLType: val.TypeName(), IsInput: true}
- default:
- return &NamedType{GQLType: val.TypeName()}
- }
-}
-
-// take a string in the form github.com/package/blah.Type and split it into package and type
-func pkgAndType(name string) (string, string) {
- parts := strings.Split(name, ".")
- if len(parts) == 1 {
- return "", name
- }
-
- return normalizeVendor(strings.Join(parts[:len(parts)-1], ".")), parts[len(parts)-1]
-}
-
-func (n NamedTypes) getType(t common.Type) *Type {
- var modifiers []string
- usePtr := true
- for {
- if _, nonNull := t.(*common.NonNull); nonNull {
- usePtr = false
- } else if _, nonNull := t.(*common.List); nonNull {
- usePtr = true
- } else {
- if usePtr {
- modifiers = append(modifiers, modPtr)
- }
- usePtr = true
- }
-
- switch val := t.(type) {
- case *common.NonNull:
- t = val.OfType
- case *common.List:
- modifiers = append(modifiers, modList)
- t = val.OfType
- case schema.NamedType:
- t := &Type{
- NamedType: n[val.TypeName()],
- Modifiers: modifiers,
- }
-
- if t.IsInterface {
- t.StripPtr()
- }
-
- return t
- default:
- panic(fmt.Errorf("unknown type %T", t))
- }
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/util.go b/vendor/github.com/vektah/gqlgen/codegen/util.go
deleted file mode 100644
index 5ff41074..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/util.go
+++ /dev/null
@@ -1,312 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/types"
- "regexp"
- "strings"
-
- "github.com/pkg/errors"
- "golang.org/x/tools/go/loader"
-)
-
-func findGoType(prog *loader.Program, pkgName string, typeName string) (types.Object, error) {
- if pkgName == "" {
- return nil, nil
- }
- fullName := typeName
- if pkgName != "" {
- fullName = pkgName + "." + typeName
- }
-
- pkgName, err := resolvePkg(pkgName)
- if err != nil {
- return nil, errors.Errorf("unable to resolve package for %s: %s\n", fullName, err.Error())
- }
-
- pkg := prog.Imported[pkgName]
- if pkg == nil {
- return nil, errors.Errorf("required package was not loaded: %s", fullName)
- }
-
- for astNode, def := range pkg.Defs {
- if astNode.Name != typeName || def.Parent() == nil || def.Parent() != pkg.Pkg.Scope() {
- continue
- }
-
- return def, nil
- }
-
- return nil, errors.Errorf("unable to find type %s\n", fullName)
-}
-
-func findGoNamedType(prog *loader.Program, pkgName string, typeName string) (*types.Named, error) {
- def, err := findGoType(prog, pkgName, typeName)
- if err != nil {
- return nil, err
- }
- if def == nil {
- return nil, nil
- }
-
- namedType, ok := def.Type().(*types.Named)
- if !ok {
- return nil, errors.Errorf("expected %s to be a named type, instead found %T\n", typeName, def.Type())
- }
-
- return namedType, nil
-}
-
-func findGoInterface(prog *loader.Program, pkgName string, typeName string) (*types.Interface, error) {
- namedType, err := findGoNamedType(prog, pkgName, typeName)
- if err != nil {
- return nil, err
- }
- if namedType == nil {
- return nil, nil
- }
-
- underlying, ok := namedType.Underlying().(*types.Interface)
- if !ok {
- return nil, errors.Errorf("expected %s to be a named interface, instead found %s", typeName, namedType.String())
- }
-
- return underlying, nil
-}
-
-func findMethod(typ *types.Named, name string) *types.Func {
- for i := 0; i < typ.NumMethods(); i++ {
- method := typ.Method(i)
- if !method.Exported() {
- continue
- }
-
- if strings.EqualFold(method.Name(), name) {
- return method
- }
- }
-
- if s, ok := typ.Underlying().(*types.Struct); ok {
- for i := 0; i < s.NumFields(); i++ {
- field := s.Field(i)
- if !field.Anonymous() {
- continue
- }
-
- if named, ok := field.Type().(*types.Named); ok {
- if f := findMethod(named, name); f != nil {
- return f
- }
- }
- }
- }
-
- return nil
-}
-
-func findField(typ *types.Struct, name string) *types.Var {
- for i := 0; i < typ.NumFields(); i++ {
- field := typ.Field(i)
- if field.Anonymous() {
- if named, ok := field.Type().(*types.Struct); ok {
- if f := findField(named, name); f != nil {
- return f
- }
- }
-
- if named, ok := field.Type().Underlying().(*types.Struct); ok {
- if f := findField(named, name); f != nil {
- return f
- }
- }
- }
-
- if !field.Exported() {
- continue
- }
-
- if strings.EqualFold(field.Name(), name) {
- return field
- }
- }
- return nil
-}
-
-type BindError struct {
- object *Object
- field *Field
- typ types.Type
- methodErr error
- varErr error
-}
-
-func (b BindError) Error() string {
- return fmt.Sprintf(
- "Unable to bind %s.%s to %s\n %s\n %s",
- b.object.GQLType,
- b.field.GQLName,
- b.typ.String(),
- b.methodErr.Error(),
- b.varErr.Error(),
- )
-}
-
-type BindErrors []BindError
-
-func (b BindErrors) Error() string {
- var errs []string
- for _, err := range b {
- errs = append(errs, err.Error())
- }
- return strings.Join(errs, "\n\n")
-}
-
-func bindObject(t types.Type, object *Object, imports *Imports) BindErrors {
- var errs BindErrors
- for i := range object.Fields {
- field := &object.Fields[i]
-
- // first try binding to a method
- methodErr := bindMethod(imports, t, field)
- if methodErr == nil {
- continue
- }
-
- // otherwise try binding to a var
- varErr := bindVar(imports, t, field)
-
- if varErr != nil {
- errs = append(errs, BindError{
- object: object,
- typ: t,
- field: field,
- varErr: varErr,
- methodErr: methodErr,
- })
- }
- }
- return errs
-}
-
-func bindMethod(imports *Imports, t types.Type, field *Field) error {
- namedType, ok := t.(*types.Named)
- if !ok {
- return fmt.Errorf("not a named type")
- }
-
- method := findMethod(namedType, field.GQLName)
- if method == nil {
- return fmt.Errorf("no method named %s", field.GQLName)
- }
- sig := method.Type().(*types.Signature)
-
- if sig.Results().Len() == 1 {
- field.NoErr = true
- } else if sig.Results().Len() != 2 {
- return fmt.Errorf("method has wrong number of args")
- }
- newArgs, err := matchArgs(field, sig.Params())
- if err != nil {
- return err
- }
-
- result := sig.Results().At(0)
- if err := validateTypeBinding(imports, field, result.Type()); err != nil {
- return errors.Wrap(err, "method has wrong return type")
- }
-
- // success, args and return type match. Bind to method
- field.GoMethodName = "obj." + method.Name()
- field.Args = newArgs
- return nil
-}
-
-func bindVar(imports *Imports, t types.Type, field *Field) error {
- underlying, ok := t.Underlying().(*types.Struct)
- if !ok {
- return fmt.Errorf("not a struct")
- }
-
- structField := findField(underlying, field.GQLName)
- if structField == nil {
- return fmt.Errorf("no field named %s", field.GQLName)
- }
-
- if err := validateTypeBinding(imports, field, structField.Type()); err != nil {
- return errors.Wrap(err, "field has wrong type")
- }
-
- // success, bind to var
- field.GoVarName = structField.Name()
- return nil
-}
-
-func matchArgs(field *Field, params *types.Tuple) ([]FieldArgument, error) {
- var newArgs []FieldArgument
-
-nextArg:
- for j := 0; j < params.Len(); j++ {
- param := params.At(j)
- for _, oldArg := range field.Args {
- if strings.EqualFold(oldArg.GQLName, param.Name()) {
- oldArg.Type.Modifiers = modifiersFromGoType(param.Type())
- newArgs = append(newArgs, oldArg)
- continue nextArg
- }
- }
-
- // no matching arg found, abort
- return nil, fmt.Errorf("arg %s not found on method", param.Name())
- }
- return newArgs, nil
-}
-
-func validateTypeBinding(imports *Imports, field *Field, goType types.Type) error {
- gqlType := normalizeVendor(field.Type.FullSignature())
- goTypeStr := normalizeVendor(goType.String())
-
- if goTypeStr == gqlType || "*"+goTypeStr == gqlType || goTypeStr == "*"+gqlType {
- field.Type.Modifiers = modifiersFromGoType(goType)
- return nil
- }
-
- // deal with type aliases
- underlyingStr := normalizeVendor(goType.Underlying().String())
- if underlyingStr == gqlType || "*"+underlyingStr == gqlType || underlyingStr == "*"+gqlType {
- field.Type.Modifiers = modifiersFromGoType(goType)
- pkg, typ := pkgAndType(goType.String())
- imp := imports.findByPath(pkg)
- field.CastType = &Ref{GoType: typ, Import: imp}
- return nil
- }
-
- return fmt.Errorf("%s is not compatible with %s", gqlType, goTypeStr)
-}
-
-func modifiersFromGoType(t types.Type) []string {
- var modifiers []string
- for {
- switch val := t.(type) {
- case *types.Pointer:
- modifiers = append(modifiers, modPtr)
- t = val.Elem()
- case *types.Array:
- modifiers = append(modifiers, modList)
- t = val.Elem()
- case *types.Slice:
- modifiers = append(modifiers, modList)
- t = val.Elem()
- default:
- return modifiers
- }
- }
-}
-
-var modsRegex = regexp.MustCompile(`^(\*|\[\])*`)
-
-func normalizeVendor(pkg string) string {
- modifiers := modsRegex.FindAllString(pkg, 1)[0]
- pkg = strings.TrimPrefix(pkg, modifiers)
- parts := strings.Split(pkg, "/vendor/")
- return modifiers + parts[len(parts)-1]
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/bool.go b/vendor/github.com/vektah/gqlgen/graphql/bool.go
deleted file mode 100644
index 7053bbca..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/bool.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package graphql
-
-import (
- "fmt"
- "io"
- "strings"
-)
-
-func MarshalBoolean(b bool) Marshaler {
- return WriterFunc(func(w io.Writer) {
- if b {
- w.Write(trueLit)
- } else {
- w.Write(falseLit)
- }
- })
-}
-
-func UnmarshalBoolean(v interface{}) (bool, error) {
- switch v := v.(type) {
- case string:
- return "true" == strings.ToLower(v), nil
- case int:
- return v != 0, nil
- case bool:
- return v, nil
- default:
- return false, fmt.Errorf("%T is not a bool", v)
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/context.go b/vendor/github.com/vektah/gqlgen/graphql/context.go
deleted file mode 100644
index 8f544100..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/context.go
+++ /dev/null
@@ -1,145 +0,0 @@
-package graphql
-
-import (
- "context"
- "fmt"
- "sync"
-
- "github.com/vektah/gqlgen/neelance/query"
-)
-
-type Resolver func(ctx context.Context) (res interface{}, err error)
-type ResolverMiddleware func(ctx context.Context, next Resolver) (res interface{}, err error)
-type RequestMiddleware func(ctx context.Context, next func(ctx context.Context) []byte) []byte
-
-type RequestContext struct {
- RawQuery string
- Variables map[string]interface{}
- Doc *query.Document
- // ErrorPresenter will be used to generate the error
- // message from errors given to Error().
- ErrorPresenter ErrorPresenterFunc
- Recover RecoverFunc
- ResolverMiddleware ResolverMiddleware
- RequestMiddleware RequestMiddleware
-
- errorsMu sync.Mutex
- Errors []*Error
-}
-
-func DefaultResolverMiddleware(ctx context.Context, next Resolver) (res interface{}, err error) {
- return next(ctx)
-}
-
-func DefaultRequestMiddleware(ctx context.Context, next func(ctx context.Context) []byte) []byte {
- return next(ctx)
-}
-
-func NewRequestContext(doc *query.Document, query string, variables map[string]interface{}) *RequestContext {
- return &RequestContext{
- Doc: doc,
- RawQuery: query,
- Variables: variables,
- ResolverMiddleware: DefaultResolverMiddleware,
- RequestMiddleware: DefaultRequestMiddleware,
- Recover: DefaultRecover,
- ErrorPresenter: DefaultErrorPresenter,
- }
-}
-
-type key string
-
-const (
- request key = "request_context"
- resolver key = "resolver_context"
-)
-
-func GetRequestContext(ctx context.Context) *RequestContext {
- val := ctx.Value(request)
- if val == nil {
- return nil
- }
-
- return val.(*RequestContext)
-}
-
-func WithRequestContext(ctx context.Context, rc *RequestContext) context.Context {
- return context.WithValue(ctx, request, rc)
-}
-
-type ResolverContext struct {
- // The name of the type this field belongs to
- Object string
- // These are the args after processing, they can be mutated in middleware to change what the resolver will get.
- Args map[string]interface{}
- // The raw field
- Field CollectedField
- // The path of fields to get to this resolver
- Path []interface{}
-}
-
-func (r *ResolverContext) PushField(alias string) {
- r.Path = append(r.Path, alias)
-}
-
-func (r *ResolverContext) PushIndex(index int) {
- r.Path = append(r.Path, index)
-}
-
-func (r *ResolverContext) Pop() {
- r.Path = r.Path[0 : len(r.Path)-1]
-}
-
-func GetResolverContext(ctx context.Context) *ResolverContext {
- val := ctx.Value(resolver)
- if val == nil {
- return nil
- }
-
- return val.(*ResolverContext)
-}
-
-func WithResolverContext(ctx context.Context, rc *ResolverContext) context.Context {
- parent := GetResolverContext(ctx)
- rc.Path = nil
- if parent != nil {
- rc.Path = append(rc.Path, parent.Path...)
- }
- if rc.Field.Alias != "" {
- rc.PushField(rc.Field.Alias)
- }
- return context.WithValue(ctx, resolver, rc)
-}
-
-// This is just a convenient wrapper method for CollectFields
-func CollectFieldsCtx(ctx context.Context, satisfies []string) []CollectedField {
- reqctx := GetRequestContext(ctx)
- resctx := GetResolverContext(ctx)
- return CollectFields(reqctx.Doc, resctx.Field.Selections, satisfies, reqctx.Variables)
-}
-
-// Errorf sends an error string to the client, passing it through the formatter.
-func (c *RequestContext) Errorf(ctx context.Context, format string, args ...interface{}) {
- c.errorsMu.Lock()
- defer c.errorsMu.Unlock()
-
- c.Errors = append(c.Errors, c.ErrorPresenter(ctx, fmt.Errorf(format, args...)))
-}
-
-// Error sends an error to the client, passing it through the formatter.
-func (c *RequestContext) Error(ctx context.Context, err error) {
- c.errorsMu.Lock()
- defer c.errorsMu.Unlock()
-
- c.Errors = append(c.Errors, c.ErrorPresenter(ctx, err))
-}
-
-// AddError is a convenience method for adding an error to the current response
-func AddError(ctx context.Context, err error) {
- GetRequestContext(ctx).Error(ctx, err)
-}
-
-// AddErrorf is a convenience method for adding an error to the current response
-func AddErrorf(ctx context.Context, format string, args ...interface{}) {
- GetRequestContext(ctx).Errorf(ctx, format, args...)
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/defer.go b/vendor/github.com/vektah/gqlgen/graphql/defer.go
deleted file mode 100644
index 79346a84..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/defer.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package graphql
-
-import (
- "io"
- "sync"
-)
-
-// Defer will begin executing the given function and immediately return a result that will block until the function completes
-func Defer(f func() Marshaler) Marshaler {
- var deferred deferred
- deferred.mu.Lock()
-
- go func() {
- deferred.result = f()
- deferred.mu.Unlock()
- }()
-
- return &deferred
-}
-
-type deferred struct {
- result Marshaler
- mu sync.Mutex
-}
-
-func (d *deferred) MarshalGQL(w io.Writer) {
- d.mu.Lock()
- d.result.MarshalGQL(w)
- d.mu.Unlock()
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/error.go b/vendor/github.com/vektah/gqlgen/graphql/error.go
deleted file mode 100644
index 15e65fab..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/error.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package graphql
-
-import (
- "context"
-)
-
-// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors
-type Error struct {
- Message string `json:"message"`
- Path []interface{} `json:"path,omitempty"`
- Locations []ErrorLocation `json:"locations,omitempty"`
- Extensions map[string]interface{} `json:"extensions,omitempty"`
-}
-
-func (e *Error) Error() string {
- return e.Message
-}
-
-type ErrorLocation struct {
- Line int `json:"line,omitempty"`
- Column int `json:"column,omitempty"`
-}
-
-type ErrorPresenterFunc func(context.Context, error) *Error
-
-type ExtendedError interface {
- Extensions() map[string]interface{}
-}
-
-func DefaultErrorPresenter(ctx context.Context, err error) *Error {
- if gqlerr, ok := err.(*Error); ok {
- gqlerr.Path = GetResolverContext(ctx).Path
- return gqlerr
- }
-
- var extensions map[string]interface{}
- if ee, ok := err.(ExtendedError); ok {
- extensions = ee.Extensions()
- }
-
- return &Error{
- Message: err.Error(),
- Path: GetResolverContext(ctx).Path,
- Extensions: extensions,
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/exec.go b/vendor/github.com/vektah/gqlgen/graphql/exec.go
deleted file mode 100644
index 2c034888..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/exec.go
+++ /dev/null
@@ -1,118 +0,0 @@
-package graphql
-
-import (
- "context"
- "fmt"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-type ExecutableSchema interface {
- Schema() *schema.Schema
-
- Query(ctx context.Context, op *query.Operation) *Response
- Mutation(ctx context.Context, op *query.Operation) *Response
- Subscription(ctx context.Context, op *query.Operation) func() *Response
-}
-
-func CollectFields(doc *query.Document, selSet []query.Selection, satisfies []string, variables map[string]interface{}) []CollectedField {
- return collectFields(doc, selSet, satisfies, variables, map[string]bool{})
-}
-
-func collectFields(doc *query.Document, selSet []query.Selection, satisfies []string, variables map[string]interface{}, visited map[string]bool) []CollectedField {
- var groupedFields []CollectedField
-
- for _, sel := range selSet {
- switch sel := sel.(type) {
- case *query.Field:
- f := getOrCreateField(&groupedFields, sel.Alias.Name, func() CollectedField {
- f := CollectedField{
- Alias: sel.Alias.Name,
- Name: sel.Name.Name,
- }
- if len(sel.Arguments) > 0 {
- f.Args = map[string]interface{}{}
- for _, arg := range sel.Arguments {
- if variable, ok := arg.Value.(*common.Variable); ok {
- if val, ok := variables[variable.Name]; ok {
- f.Args[arg.Name.Name] = val
- }
- } else {
- f.Args[arg.Name.Name] = arg.Value.Value(variables)
- }
- }
- }
- return f
- })
-
- f.Selections = append(f.Selections, sel.Selections...)
- case *query.InlineFragment:
- if !instanceOf(sel.On.Ident.Name, satisfies) {
- continue
- }
-
- for _, childField := range collectFields(doc, sel.Selections, satisfies, variables, visited) {
- f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
- f.Selections = append(f.Selections, childField.Selections...)
- }
-
- case *query.FragmentSpread:
- fragmentName := sel.Name.Name
- if _, seen := visited[fragmentName]; seen {
- continue
- }
- visited[fragmentName] = true
-
- fragment := doc.Fragments.Get(fragmentName)
- if fragment == nil {
- // should never happen, validator has already run
- panic(fmt.Errorf("missing fragment %s", fragmentName))
- }
-
- if !instanceOf(fragment.On.Ident.Name, satisfies) {
- continue
- }
-
- for _, childField := range collectFields(doc, fragment.Selections, satisfies, variables, visited) {
- f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
- f.Selections = append(f.Selections, childField.Selections...)
- }
-
- default:
- panic(fmt.Errorf("unsupported %T", sel))
- }
- }
-
- return groupedFields
-}
-
-type CollectedField struct {
- Alias string
- Name string
- Args map[string]interface{}
- Selections []query.Selection
-}
-
-func instanceOf(val string, satisfies []string) bool {
- for _, s := range satisfies {
- if val == s {
- return true
- }
- }
- return false
-}
-
-func getOrCreateField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField {
- for i, cf := range *c {
- if cf.Alias == name {
- return &(*c)[i]
- }
- }
-
- f := creator()
-
- *c = append(*c, f)
- return &(*c)[len(*c)-1]
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/float.go b/vendor/github.com/vektah/gqlgen/graphql/float.go
deleted file mode 100644
index c08b490a..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/float.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package graphql
-
-import (
- "fmt"
- "io"
- "strconv"
-)
-
-func MarshalFloat(f float64) Marshaler {
- return WriterFunc(func(w io.Writer) {
- io.WriteString(w, fmt.Sprintf("%f", f))
- })
-}
-
-func UnmarshalFloat(v interface{}) (float64, error) {
- switch v := v.(type) {
- case string:
- return strconv.ParseFloat(v, 64)
- case int:
- return float64(v), nil
- case float64:
- return v, nil
- default:
- return 0, fmt.Errorf("%T is not an float", v)
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/id.go b/vendor/github.com/vektah/gqlgen/graphql/id.go
deleted file mode 100644
index 7958670c..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/id.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package graphql
-
-import (
- "fmt"
- "io"
- "strconv"
-)
-
-func MarshalID(s string) Marshaler {
- return WriterFunc(func(w io.Writer) {
- io.WriteString(w, strconv.Quote(s))
- })
-}
-func UnmarshalID(v interface{}) (string, error) {
- switch v := v.(type) {
- case string:
- return v, nil
- case int:
- return strconv.Itoa(v), nil
- case float64:
- return fmt.Sprintf("%f", v), nil
- case bool:
- if v {
- return "true", nil
- } else {
- return "false", nil
- }
- case nil:
- return "null", nil
- default:
- return "", fmt.Errorf("%T is not a string", v)
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/int.go b/vendor/github.com/vektah/gqlgen/graphql/int.go
deleted file mode 100644
index b63b4c2a..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/int.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package graphql
-
-import (
- "fmt"
- "io"
- "strconv"
-)
-
-func MarshalInt(i int) Marshaler {
- return WriterFunc(func(w io.Writer) {
- io.WriteString(w, strconv.Itoa(i))
- })
-}
-
-func UnmarshalInt(v interface{}) (int, error) {
- switch v := v.(type) {
- case string:
- return strconv.Atoi(v)
- case int:
- return v, nil
- case float64:
- return int(v), nil
- default:
- return 0, fmt.Errorf("%T is not an int", v)
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/jsonw.go b/vendor/github.com/vektah/gqlgen/graphql/jsonw.go
deleted file mode 100644
index ef9e69c7..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/jsonw.go
+++ /dev/null
@@ -1,83 +0,0 @@
-package graphql
-
-import (
- "io"
- "strconv"
-)
-
-var nullLit = []byte(`null`)
-var trueLit = []byte(`true`)
-var falseLit = []byte(`false`)
-var openBrace = []byte(`{`)
-var closeBrace = []byte(`}`)
-var openBracket = []byte(`[`)
-var closeBracket = []byte(`]`)
-var colon = []byte(`:`)
-var comma = []byte(`,`)
-
-var Null = lit(nullLit)
-var True = lit(trueLit)
-var False = lit(falseLit)
-
-type Marshaler interface {
- MarshalGQL(w io.Writer)
-}
-
-type Unmarshaler interface {
- UnmarshalGQL(v interface{}) error
-}
-
-type OrderedMap struct {
- Keys []string
- Values []Marshaler
-}
-
-type WriterFunc func(writer io.Writer)
-
-func (f WriterFunc) MarshalGQL(w io.Writer) {
- f(w)
-}
-
-func NewOrderedMap(len int) *OrderedMap {
- return &OrderedMap{
- Keys: make([]string, len),
- Values: make([]Marshaler, len),
- }
-}
-
-func (m *OrderedMap) Add(key string, value Marshaler) {
- m.Keys = append(m.Keys, key)
- m.Values = append(m.Values, value)
-}
-
-func (m *OrderedMap) MarshalGQL(writer io.Writer) {
- writer.Write(openBrace)
- for i, key := range m.Keys {
- if i != 0 {
- writer.Write(comma)
- }
- io.WriteString(writer, strconv.Quote(key))
- writer.Write(colon)
- m.Values[i].MarshalGQL(writer)
- }
- writer.Write(closeBrace)
-}
-
-type Array []Marshaler
-
-func (a Array) MarshalGQL(writer io.Writer) {
- writer.Write(openBracket)
- for i, val := range a {
- if i != 0 {
- writer.Write(comma)
- }
- val.MarshalGQL(writer)
- }
- writer.Write(closeBracket)
-}
-
-func lit(b []byte) Marshaler {
- return WriterFunc(func(w io.Writer) {
- w.Write(b)
- })
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/map.go b/vendor/github.com/vektah/gqlgen/graphql/map.go
deleted file mode 100644
index 1e91d1d9..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/map.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package graphql
-
-import (
- "encoding/json"
- "fmt"
- "io"
-)
-
-func MarshalMap(val map[string]interface{}) Marshaler {
- return WriterFunc(func(w io.Writer) {
- err := json.NewEncoder(w).Encode(val)
- if err != nil {
- panic(err)
- }
- })
-}
-
-func UnmarshalMap(v interface{}) (map[string]interface{}, error) {
- if m, ok := v.(map[string]interface{}); ok {
- return m, nil
- }
-
- return nil, fmt.Errorf("%T is not a map", v)
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/oneshot.go b/vendor/github.com/vektah/gqlgen/graphql/oneshot.go
deleted file mode 100644
index dd31f5ba..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/oneshot.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package graphql
-
-func OneShot(resp *Response) func() *Response {
- var oneshot bool
-
- return func() *Response {
- if oneshot {
- return nil
- }
- oneshot = true
-
- return resp
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/recovery.go b/vendor/github.com/vektah/gqlgen/graphql/recovery.go
deleted file mode 100644
index 3aa032dc..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/recovery.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package graphql
-
-import (
- "context"
- "errors"
- "fmt"
- "os"
- "runtime/debug"
-)
-
-type RecoverFunc func(ctx context.Context, err interface{}) (userMessage error)
-
-func DefaultRecover(ctx context.Context, err interface{}) error {
- fmt.Fprintln(os.Stderr, err)
- fmt.Fprintln(os.Stderr)
- debug.PrintStack()
-
- return errors.New("internal system error")
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/response.go b/vendor/github.com/vektah/gqlgen/graphql/response.go
deleted file mode 100644
index c0dc1c23..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/response.go
+++ /dev/null
@@ -1,18 +0,0 @@
-package graphql
-
-import (
- "context"
- "encoding/json"
- "fmt"
-)
-
-type Response struct {
- Data json.RawMessage `json:"data"`
- Errors []*Error `json:"errors,omitempty"`
-}
-
-func ErrorResponse(ctx context.Context, messagef string, args ...interface{}) *Response {
- return &Response{
- Errors: []*Error{{Message: fmt.Sprintf(messagef, args...)}},
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/string.go b/vendor/github.com/vektah/gqlgen/graphql/string.go
deleted file mode 100644
index d5fb3294..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/string.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package graphql
-
-import (
- "fmt"
- "io"
- "strconv"
-)
-
-const encodeHex = "0123456789ABCDEF"
-
-func MarshalString(s string) Marshaler {
- return WriterFunc(func(w io.Writer) {
- start := 0
- io.WriteString(w, `"`)
-
- for i, c := range s {
- if c < 0x20 || c == '\\' || c == '"' {
- io.WriteString(w, s[start:i])
-
- switch c {
- case '\t':
- io.WriteString(w, `\t`)
- case '\r':
- io.WriteString(w, `\r`)
- case '\n':
- io.WriteString(w, `\n`)
- case '\\':
- io.WriteString(w, `\\`)
- case '"':
- io.WriteString(w, `\"`)
- default:
- io.WriteString(w, `\u00`)
- w.Write([]byte{encodeHex[c>>4], encodeHex[c&0xf]})
- }
-
- start = i + 1
- }
- }
-
- io.WriteString(w, s[start:])
- io.WriteString(w, `"`)
- })
-}
-func UnmarshalString(v interface{}) (string, error) {
- switch v := v.(type) {
- case string:
- return v, nil
- case int:
- return strconv.Itoa(v), nil
- case float64:
- return fmt.Sprintf("%f", v), nil
- case bool:
- if v {
- return "true", nil
- } else {
- return "false", nil
- }
- case nil:
- return "null", nil
- default:
- return "", fmt.Errorf("%T is not a string", v)
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/time.go b/vendor/github.com/vektah/gqlgen/graphql/time.go
deleted file mode 100644
index 4f448560..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/time.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package graphql
-
-import (
- "errors"
- "io"
- "strconv"
- "time"
-)
-
-func MarshalTime(t time.Time) Marshaler {
- return WriterFunc(func(w io.Writer) {
- io.WriteString(w, strconv.Quote(t.Format(time.RFC3339)))
- })
-}
-
-func UnmarshalTime(v interface{}) (time.Time, error) {
- if tmpStr, ok := v.(string); ok {
- return time.Parse(time.RFC3339, tmpStr)
- }
- return time.Time{}, errors.New("time should be RFC3339 formatted string")
-}
diff --git a/vendor/github.com/vektah/gqlgen/handler/graphql.go b/vendor/github.com/vektah/gqlgen/handler/graphql.go
deleted file mode 100644
index 4a5c61f5..00000000
--- a/vendor/github.com/vektah/gqlgen/handler/graphql.go
+++ /dev/null
@@ -1,235 +0,0 @@
-package handler
-
-import (
- "context"
- "encoding/json"
- "fmt"
- "net/http"
- "strings"
-
- "github.com/gorilla/websocket"
- "github.com/vektah/gqlgen/graphql"
- "github.com/vektah/gqlgen/neelance/errors"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/validation"
-)
-
-type params struct {
- Query string `json:"query"`
- OperationName string `json:"operationName"`
- Variables map[string]interface{} `json:"variables"`
-}
-
-type Config struct {
- upgrader websocket.Upgrader
- recover graphql.RecoverFunc
- errorPresenter graphql.ErrorPresenterFunc
- resolverHook graphql.ResolverMiddleware
- requestHook graphql.RequestMiddleware
-}
-
-func (c *Config) newRequestContext(doc *query.Document, query string, variables map[string]interface{}) *graphql.RequestContext {
- reqCtx := graphql.NewRequestContext(doc, query, variables)
- if hook := c.recover; hook != nil {
- reqCtx.Recover = hook
- }
-
- if hook := c.errorPresenter; hook != nil {
- reqCtx.ErrorPresenter = hook
- }
-
- if hook := c.resolverHook; hook != nil {
- reqCtx.ResolverMiddleware = hook
- }
-
- if hook := c.requestHook; hook != nil {
- reqCtx.RequestMiddleware = hook
- }
-
- return reqCtx
-}
-
-type Option func(cfg *Config)
-
-func WebsocketUpgrader(upgrader websocket.Upgrader) Option {
- return func(cfg *Config) {
- cfg.upgrader = upgrader
- }
-}
-
-func RecoverFunc(recover graphql.RecoverFunc) Option {
- return func(cfg *Config) {
- cfg.recover = recover
- }
-}
-
-// ErrorPresenter transforms errors found while resolving into errors that will be returned to the user. It provides
-// a good place to add any extra fields, like error.type, that might be desired by your frontend. Check the default
-// implementation in graphql.DefaultErrorPresenter for an example.
-func ErrorPresenter(f graphql.ErrorPresenterFunc) Option {
- return func(cfg *Config) {
- cfg.errorPresenter = f
- }
-}
-
-// ResolverMiddleware allows you to define a function that will be called around every resolver,
-// useful for tracing and logging.
-// It will only be called for user defined resolvers, any direct binding to models is assumed
-// to cost nothing.
-func ResolverMiddleware(middleware graphql.ResolverMiddleware) Option {
- return func(cfg *Config) {
- if cfg.resolverHook == nil {
- cfg.resolverHook = middleware
- return
- }
-
- lastResolve := cfg.resolverHook
- cfg.resolverHook = func(ctx context.Context, next graphql.Resolver) (res interface{}, err error) {
- return lastResolve(ctx, func(ctx context.Context) (res interface{}, err error) {
- return middleware(ctx, next)
- })
- }
- }
-}
-
-// RequestMiddleware allows you to define a function that will be called around the root request,
-// after the query has been parsed. This is useful for logging and tracing
-func RequestMiddleware(middleware graphql.RequestMiddleware) Option {
- return func(cfg *Config) {
- if cfg.requestHook == nil {
- cfg.requestHook = middleware
- return
- }
-
- lastResolve := cfg.requestHook
- cfg.requestHook = func(ctx context.Context, next func(ctx context.Context) []byte) []byte {
- return lastResolve(ctx, func(ctx context.Context) []byte {
- return middleware(ctx, next)
- })
- }
- }
-}
-
-func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc {
- cfg := Config{
- upgrader: websocket.Upgrader{
- ReadBufferSize: 1024,
- WriteBufferSize: 1024,
- },
- }
-
- for _, option := range options {
- option(&cfg)
- }
-
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- if r.Method == http.MethodOptions {
- w.Header().Set("Allow", "OPTIONS, GET, POST")
- w.WriteHeader(http.StatusOK)
- return
- }
-
- if strings.Contains(r.Header.Get("Upgrade"), "websocket") {
- connectWs(exec, w, r, &cfg)
- return
- }
-
- var reqParams params
- switch r.Method {
- case http.MethodGet:
- reqParams.Query = r.URL.Query().Get("query")
- reqParams.OperationName = r.URL.Query().Get("operationName")
-
- if variables := r.URL.Query().Get("variables"); variables != "" {
- if err := json.Unmarshal([]byte(variables), &reqParams.Variables); err != nil {
- sendErrorf(w, http.StatusBadRequest, "variables could not be decoded")
- return
- }
- }
- case http.MethodPost:
- if err := json.NewDecoder(r.Body).Decode(&reqParams); err != nil {
- sendErrorf(w, http.StatusBadRequest, "json body could not be decoded: "+err.Error())
- return
- }
- default:
- w.WriteHeader(http.StatusMethodNotAllowed)
- return
- }
- w.Header().Set("Content-Type", "application/json")
-
- doc, qErr := query.Parse(reqParams.Query)
- if qErr != nil {
- sendError(w, http.StatusUnprocessableEntity, qErr)
- return
- }
-
- errs := validation.Validate(exec.Schema(), doc)
- if len(errs) != 0 {
- sendError(w, http.StatusUnprocessableEntity, errs...)
- return
- }
-
- op, err := doc.GetOperation(reqParams.OperationName)
- if err != nil {
- sendErrorf(w, http.StatusUnprocessableEntity, err.Error())
- return
- }
-
- reqCtx := cfg.newRequestContext(doc, reqParams.Query, reqParams.Variables)
- ctx := graphql.WithRequestContext(r.Context(), reqCtx)
-
- defer func() {
- if err := recover(); err != nil {
- userErr := reqCtx.Recover(ctx, err)
- sendErrorf(w, http.StatusUnprocessableEntity, userErr.Error())
- }
- }()
-
- switch op.Type {
- case query.Query:
- b, err := json.Marshal(exec.Query(ctx, op))
- if err != nil {
- panic(err)
- }
- w.Write(b)
- case query.Mutation:
- b, err := json.Marshal(exec.Mutation(ctx, op))
- if err != nil {
- panic(err)
- }
- w.Write(b)
- default:
- sendErrorf(w, http.StatusBadRequest, "unsupported operation type")
- }
- })
-}
-
-func sendError(w http.ResponseWriter, code int, errors ...*errors.QueryError) {
- w.WriteHeader(code)
- var errs []*graphql.Error
- for _, err := range errors {
- var locations []graphql.ErrorLocation
- for _, l := range err.Locations {
- fmt.Println(graphql.ErrorLocation(l))
- locations = append(locations, graphql.ErrorLocation{
- Line: l.Line,
- Column: l.Column,
- })
- }
-
- errs = append(errs, &graphql.Error{
- Message: err.Message,
- Path: err.Path,
- Locations: locations,
- })
- }
- b, err := json.Marshal(&graphql.Response{Errors: errs})
- if err != nil {
- panic(err)
- }
- w.Write(b)
-}
-
-func sendErrorf(w http.ResponseWriter, code int, format string, args ...interface{}) {
- sendError(w, code, &errors.QueryError{Message: fmt.Sprintf(format, args...)})
-}
diff --git a/vendor/github.com/vektah/gqlgen/handler/playground.go b/vendor/github.com/vektah/gqlgen/handler/playground.go
deleted file mode 100644
index 44533590..00000000
--- a/vendor/github.com/vektah/gqlgen/handler/playground.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package handler
-
-import (
- "html/template"
- "net/http"
-)
-
-var page = template.Must(template.New("graphiql").Parse(`<!DOCTYPE html>
-<html>
-<head>
- <meta charset=utf-8/>
- <meta name="viewport" content="user-scalable=no, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, minimal-ui">
- <link rel="shortcut icon" href="https://graphcool-playground.netlify.com/favicon.png">
- <link rel="stylesheet" href="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/static/css/index.css"/>
- <link rel="shortcut icon" href="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/favicon.png"/>
- <script src="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/static/js/middleware.js"></script>
- <title>{{.title}}</title>
-</head>
-<body>
-<style type="text/css">
- html { font-family: "Open Sans", sans-serif; overflow: hidden; }
- body { margin: 0; background: #172a3a; }
-</style>
-<div id="root"/>
-<script type="text/javascript">
- window.addEventListener('load', function (event) {
- const root = document.getElementById('root');
- root.classList.add('playgroundIn');
- const wsProto = location.protocol == 'https:' ? 'wss:' : 'ws:'
- GraphQLPlayground.init(root, {
- endpoint: location.protocol + '//' + location.host + '{{.endpoint}}',
- subscriptionsEndpoint: wsProto + '//' + location.host + '{{.endpoint }}',
- })
- })
-</script>
-</body>
-</html>
-`))
-
-func Playground(title string, endpoint string) http.HandlerFunc {
- return func(w http.ResponseWriter, r *http.Request) {
- err := page.Execute(w, map[string]string{
- "title": title,
- "endpoint": endpoint,
- "version": "1.4.3",
- })
- if err != nil {
- panic(err)
- }
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/handler/stub.go b/vendor/github.com/vektah/gqlgen/handler/stub.go
deleted file mode 100644
index 46b27e46..00000000
--- a/vendor/github.com/vektah/gqlgen/handler/stub.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package handler
-
-import (
- "context"
- "time"
-
- "github.com/vektah/gqlgen/graphql"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-type executableSchemaStub struct {
-}
-
-var _ graphql.ExecutableSchema = &executableSchemaStub{}
-
-func (e *executableSchemaStub) Schema() *schema.Schema {
- return schema.MustParse(`
- schema { query: Query }
- type Query { me: User! }
- type User { name: String! }
- `)
-}
-
-func (e *executableSchemaStub) Query(ctx context.Context, op *query.Operation) *graphql.Response {
- return &graphql.Response{Data: []byte(`{"name":"test"}`)}
-}
-
-func (e *executableSchemaStub) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {
- return graphql.ErrorResponse(ctx, "mutations are not supported")
-}
-
-func (e *executableSchemaStub) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {
- return func() *graphql.Response {
- time.Sleep(50 * time.Millisecond)
- select {
- case <-ctx.Done():
- return nil
- default:
- return &graphql.Response{
- Data: []byte(`{"name":"test"}`),
- }
- }
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/handler/websocket.go b/vendor/github.com/vektah/gqlgen/handler/websocket.go
deleted file mode 100644
index e80748ca..00000000
--- a/vendor/github.com/vektah/gqlgen/handler/websocket.go
+++ /dev/null
@@ -1,245 +0,0 @@
-package handler
-
-import (
- "context"
- "encoding/json"
- "fmt"
- "log"
- "net/http"
- "sync"
-
- "github.com/gorilla/websocket"
- "github.com/vektah/gqlgen/graphql"
- "github.com/vektah/gqlgen/neelance/errors"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/validation"
-)
-
-const (
- connectionInitMsg = "connection_init" // Client -> Server
- connectionTerminateMsg = "connection_terminate" // Client -> Server
- startMsg = "start" // Client -> Server
- stopMsg = "stop" // Client -> Server
- connectionAckMsg = "connection_ack" // Server -> Client
- connectionErrorMsg = "connection_error" // Server -> Client
- dataMsg = "data" // Server -> Client
- errorMsg = "error" // Server -> Client
- completeMsg = "complete" // Server -> Client
- //connectionKeepAliveMsg = "ka" // Server -> Client TODO: keepalives
-)
-
-type operationMessage struct {
- Payload json.RawMessage `json:"payload,omitempty"`
- ID string `json:"id,omitempty"`
- Type string `json:"type"`
-}
-
-type wsConnection struct {
- ctx context.Context
- conn *websocket.Conn
- exec graphql.ExecutableSchema
- active map[string]context.CancelFunc
- mu sync.Mutex
- cfg *Config
-}
-
-func connectWs(exec graphql.ExecutableSchema, w http.ResponseWriter, r *http.Request, cfg *Config) {
- ws, err := cfg.upgrader.Upgrade(w, r, http.Header{
- "Sec-Websocket-Protocol": []string{"graphql-ws"},
- })
- if err != nil {
- log.Printf("unable to upgrade %T to websocket %s: ", w, err.Error())
- sendErrorf(w, http.StatusBadRequest, "unable to upgrade")
- return
- }
-
- conn := wsConnection{
- active: map[string]context.CancelFunc{},
- exec: exec,
- conn: ws,
- ctx: r.Context(),
- cfg: cfg,
- }
-
- if !conn.init() {
- return
- }
-
- conn.run()
-}
-
-func (c *wsConnection) init() bool {
- message := c.readOp()
- if message == nil {
- c.close(websocket.CloseProtocolError, "decoding error")
- return false
- }
-
- switch message.Type {
- case connectionInitMsg:
- c.write(&operationMessage{Type: connectionAckMsg})
- case connectionTerminateMsg:
- c.close(websocket.CloseNormalClosure, "terminated")
- return false
- default:
- c.sendConnectionError("unexpected message %s", message.Type)
- c.close(websocket.CloseProtocolError, "unexpected message")
- return false
- }
-
- return true
-}
-
-func (c *wsConnection) write(msg *operationMessage) {
- c.mu.Lock()
- c.conn.WriteJSON(msg)
- c.mu.Unlock()
-}
-
-func (c *wsConnection) run() {
- for {
- message := c.readOp()
- if message == nil {
- return
- }
-
- switch message.Type {
- case startMsg:
- if !c.subscribe(message) {
- return
- }
- case stopMsg:
- c.mu.Lock()
- closer := c.active[message.ID]
- c.mu.Unlock()
- if closer == nil {
- c.sendError(message.ID, errors.Errorf("%s is not running, cannot stop", message.ID))
- continue
- }
-
- closer()
- case connectionTerminateMsg:
- c.close(websocket.CloseNormalClosure, "terminated")
- return
- default:
- c.sendConnectionError("unexpected message %s", message.Type)
- c.close(websocket.CloseProtocolError, "unexpected message")
- return
- }
- }
-}
-
-func (c *wsConnection) subscribe(message *operationMessage) bool {
- var reqParams params
- if err := json.Unmarshal(message.Payload, &reqParams); err != nil {
- c.sendConnectionError("invalid json")
- return false
- }
-
- doc, qErr := query.Parse(reqParams.Query)
- if qErr != nil {
- c.sendError(message.ID, qErr)
- return true
- }
-
- errs := validation.Validate(c.exec.Schema(), doc)
- if len(errs) != 0 {
- c.sendError(message.ID, errs...)
- return true
- }
-
- op, err := doc.GetOperation(reqParams.OperationName)
- if err != nil {
- c.sendError(message.ID, errors.Errorf("%s", err.Error()))
- return true
- }
-
- reqCtx := c.cfg.newRequestContext(doc, reqParams.Query, reqParams.Variables)
- ctx := graphql.WithRequestContext(c.ctx, reqCtx)
-
- if op.Type != query.Subscription {
- var result *graphql.Response
- if op.Type == query.Query {
- result = c.exec.Query(ctx, op)
- } else {
- result = c.exec.Mutation(ctx, op)
- }
-
- c.sendData(message.ID, result)
- c.write(&operationMessage{ID: message.ID, Type: completeMsg})
- return true
- }
-
- ctx, cancel := context.WithCancel(ctx)
- c.mu.Lock()
- c.active[message.ID] = cancel
- c.mu.Unlock()
- go func() {
- defer func() {
- if r := recover(); r != nil {
- userErr := reqCtx.Recover(ctx, r)
- c.sendError(message.ID, &errors.QueryError{Message: userErr.Error()})
- }
- }()
- next := c.exec.Subscription(ctx, op)
- for result := next(); result != nil; result = next() {
- c.sendData(message.ID, result)
- }
-
- c.write(&operationMessage{ID: message.ID, Type: completeMsg})
-
- c.mu.Lock()
- delete(c.active, message.ID)
- c.mu.Unlock()
- cancel()
- }()
-
- return true
-}
-
-func (c *wsConnection) sendData(id string, response *graphql.Response) {
- b, err := json.Marshal(response)
- if err != nil {
- c.sendError(id, errors.Errorf("unable to encode json response: %s", err.Error()))
- return
- }
-
- c.write(&operationMessage{Type: dataMsg, ID: id, Payload: b})
-}
-
-func (c *wsConnection) sendError(id string, errors ...*errors.QueryError) {
- var errs []error
- for _, err := range errors {
- errs = append(errs, err)
- }
- b, err := json.Marshal(errs)
- if err != nil {
- panic(err)
- }
- c.write(&operationMessage{Type: errorMsg, ID: id, Payload: b})
-}
-
-func (c *wsConnection) sendConnectionError(format string, args ...interface{}) {
- b, err := json.Marshal(&graphql.Error{Message: fmt.Sprintf(format, args...)})
- if err != nil {
- panic(err)
- }
-
- c.write(&operationMessage{Type: connectionErrorMsg, Payload: b})
-}
-
-func (c *wsConnection) readOp() *operationMessage {
- message := operationMessage{}
- if err := c.conn.ReadJSON(&message); err != nil {
- c.sendConnectionError("invalid json")
- return nil
- }
- return &message
-}
-
-func (c *wsConnection) close(closeCode int, message string) {
- c.mu.Lock()
- _ = c.conn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(closeCode, message))
- c.mu.Unlock()
- _ = c.conn.Close()
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/LICENSE b/vendor/github.com/vektah/gqlgen/neelance/LICENSE
deleted file mode 100644
index 3907ceca..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright (c) 2016 Richard Musiol. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/directive.go b/vendor/github.com/vektah/gqlgen/neelance/common/directive.go
deleted file mode 100644
index 62dca47f..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/directive.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package common
-
-type Directive struct {
- Name Ident
- Args ArgumentList
-}
-
-func ParseDirectives(l *Lexer) DirectiveList {
- var directives DirectiveList
- for l.Peek() == '@' {
- l.ConsumeToken('@')
- d := &Directive{}
- d.Name = l.ConsumeIdentWithLoc()
- d.Name.Loc.Column--
- if l.Peek() == '(' {
- d.Args = ParseArguments(l)
- }
- directives = append(directives, d)
- }
- return directives
-}
-
-type DirectiveList []*Directive
-
-func (l DirectiveList) Get(name string) *Directive {
- for _, d := range l {
- if d.Name.Name == name {
- return d
- }
- }
- return nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/lexer.go b/vendor/github.com/vektah/gqlgen/neelance/common/lexer.go
deleted file mode 100644
index fdc1e622..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/lexer.go
+++ /dev/null
@@ -1,122 +0,0 @@
-package common
-
-import (
- "fmt"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type syntaxError string
-
-type Lexer struct {
- sc *scanner.Scanner
- next rune
- descComment string
-}
-
-type Ident struct {
- Name string
- Loc errors.Location
-}
-
-func New(sc *scanner.Scanner) *Lexer {
- l := &Lexer{sc: sc}
- l.Consume()
- return l
-}
-
-func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) {
- defer func() {
- if err := recover(); err != nil {
- if err, ok := err.(syntaxError); ok {
- errRes = errors.Errorf("syntax error: %s", err)
- errRes.Locations = []errors.Location{l.Location()}
- return
- }
- panic(err)
- }
- }()
-
- f()
- return
-}
-
-func (l *Lexer) Peek() rune {
- return l.next
-}
-
-func (l *Lexer) Consume() {
- l.descComment = ""
- for {
- l.next = l.sc.Scan()
- if l.next == ',' {
- continue
- }
- if l.next == '#' {
- if l.sc.Peek() == ' ' {
- l.sc.Next()
- }
- if l.descComment != "" {
- l.descComment += "\n"
- }
- for {
- next := l.sc.Next()
- if next == '\n' || next == scanner.EOF {
- break
- }
- l.descComment += string(next)
- }
- continue
- }
- break
- }
-}
-
-func (l *Lexer) ConsumeIdent() string {
- name := l.sc.TokenText()
- l.ConsumeToken(scanner.Ident)
- return name
-}
-
-func (l *Lexer) ConsumeIdentWithLoc() Ident {
- loc := l.Location()
- name := l.sc.TokenText()
- l.ConsumeToken(scanner.Ident)
- return Ident{name, loc}
-}
-
-func (l *Lexer) ConsumeKeyword(keyword string) {
- if l.next != scanner.Ident || l.sc.TokenText() != keyword {
- l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword))
- }
- l.Consume()
-}
-
-func (l *Lexer) ConsumeLiteral() *BasicLit {
- lit := &BasicLit{Type: l.next, Text: l.sc.TokenText()}
- l.Consume()
- return lit
-}
-
-func (l *Lexer) ConsumeToken(expected rune) {
- if l.next != expected {
- l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected)))
- }
- l.Consume()
-}
-
-func (l *Lexer) DescComment() string {
- return l.descComment
-}
-
-func (l *Lexer) SyntaxError(message string) {
- panic(syntaxError(message))
-}
-
-func (l *Lexer) Location() errors.Location {
- return errors.Location{
- Line: l.sc.Line,
- Column: l.sc.Column,
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/literals.go b/vendor/github.com/vektah/gqlgen/neelance/common/literals.go
deleted file mode 100644
index 55619ba0..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/literals.go
+++ /dev/null
@@ -1,206 +0,0 @@
-package common
-
-import (
- "strconv"
- "strings"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type Literal interface {
- Value(vars map[string]interface{}) interface{}
- String() string
- Location() errors.Location
-}
-
-type BasicLit struct {
- Type rune
- Text string
- Loc errors.Location
-}
-
-func (lit *BasicLit) Value(vars map[string]interface{}) interface{} {
- switch lit.Type {
- case scanner.Int:
- value, err := strconv.ParseInt(lit.Text, 10, 64)
- if err != nil {
- panic(err)
- }
- return int(value)
-
- case scanner.Float:
- value, err := strconv.ParseFloat(lit.Text, 64)
- if err != nil {
- panic(err)
- }
- return value
-
- case scanner.String:
- value, err := strconv.Unquote(lit.Text)
- if err != nil {
- panic(err)
- }
- return value
-
- case scanner.Ident:
- switch lit.Text {
- case "true":
- return true
- case "false":
- return false
- default:
- return lit.Text
- }
-
- default:
- panic("invalid literal")
- }
-}
-
-func (lit *BasicLit) String() string {
- return lit.Text
-}
-
-func (lit *BasicLit) Location() errors.Location {
- return lit.Loc
-}
-
-type ListLit struct {
- Entries []Literal
- Loc errors.Location
-}
-
-func (lit *ListLit) Value(vars map[string]interface{}) interface{} {
- entries := make([]interface{}, len(lit.Entries))
- for i, entry := range lit.Entries {
- entries[i] = entry.Value(vars)
- }
- return entries
-}
-
-func (lit *ListLit) String() string {
- entries := make([]string, len(lit.Entries))
- for i, entry := range lit.Entries {
- entries[i] = entry.String()
- }
- return "[" + strings.Join(entries, ", ") + "]"
-}
-
-func (lit *ListLit) Location() errors.Location {
- return lit.Loc
-}
-
-type ObjectLit struct {
- Fields []*ObjectLitField
- Loc errors.Location
-}
-
-type ObjectLitField struct {
- Name Ident
- Value Literal
-}
-
-func (lit *ObjectLit) Value(vars map[string]interface{}) interface{} {
- fields := make(map[string]interface{}, len(lit.Fields))
- for _, f := range lit.Fields {
- fields[f.Name.Name] = f.Value.Value(vars)
- }
- return fields
-}
-
-func (lit *ObjectLit) String() string {
- entries := make([]string, 0, len(lit.Fields))
- for _, f := range lit.Fields {
- entries = append(entries, f.Name.Name+": "+f.Value.String())
- }
- return "{" + strings.Join(entries, ", ") + "}"
-}
-
-func (lit *ObjectLit) Location() errors.Location {
- return lit.Loc
-}
-
-type NullLit struct {
- Loc errors.Location
-}
-
-func (lit *NullLit) Value(vars map[string]interface{}) interface{} {
- return nil
-}
-
-func (lit *NullLit) String() string {
- return "null"
-}
-
-func (lit *NullLit) Location() errors.Location {
- return lit.Loc
-}
-
-type Variable struct {
- Name string
- Loc errors.Location
-}
-
-func (v Variable) Value(vars map[string]interface{}) interface{} {
- return vars[v.Name]
-}
-
-func (v Variable) String() string {
- return "$" + v.Name
-}
-
-func (v *Variable) Location() errors.Location {
- return v.Loc
-}
-
-func ParseLiteral(l *Lexer, constOnly bool) Literal {
- loc := l.Location()
- switch l.Peek() {
- case '$':
- if constOnly {
- l.SyntaxError("variable not allowed")
- panic("unreachable")
- }
- l.ConsumeToken('$')
- return &Variable{l.ConsumeIdent(), loc}
-
- case scanner.Int, scanner.Float, scanner.String, scanner.Ident:
- lit := l.ConsumeLiteral()
- if lit.Type == scanner.Ident && lit.Text == "null" {
- return &NullLit{loc}
- }
- lit.Loc = loc
- return lit
- case '-':
- l.ConsumeToken('-')
- lit := l.ConsumeLiteral()
- lit.Text = "-" + lit.Text
- lit.Loc = loc
- return lit
- case '[':
- l.ConsumeToken('[')
- var list []Literal
- for l.Peek() != ']' {
- list = append(list, ParseLiteral(l, constOnly))
- }
- l.ConsumeToken(']')
- return &ListLit{list, loc}
-
- case '{':
- l.ConsumeToken('{')
- var fields []*ObjectLitField
- for l.Peek() != '}' {
- name := l.ConsumeIdentWithLoc()
- l.ConsumeToken(':')
- value := ParseLiteral(l, constOnly)
- fields = append(fields, &ObjectLitField{name, value})
- }
- l.ConsumeToken('}')
- return &ObjectLit{fields, loc}
-
- default:
- l.SyntaxError("invalid value")
- panic("unreachable")
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/types.go b/vendor/github.com/vektah/gqlgen/neelance/common/types.go
deleted file mode 100644
index 0bbf24ef..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/types.go
+++ /dev/null
@@ -1,80 +0,0 @@
-package common
-
-import (
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type Type interface {
- Kind() string
- String() string
-}
-
-type List struct {
- OfType Type
-}
-
-type NonNull struct {
- OfType Type
-}
-
-type TypeName struct {
- Ident
-}
-
-func (*List) Kind() string { return "LIST" }
-func (*NonNull) Kind() string { return "NON_NULL" }
-func (*TypeName) Kind() string { panic("TypeName needs to be resolved to actual type") }
-
-func (t *List) String() string { return "[" + t.OfType.String() + "]" }
-func (t *NonNull) String() string { return t.OfType.String() + "!" }
-func (*TypeName) String() string { panic("TypeName needs to be resolved to actual type") }
-
-func ParseType(l *Lexer) Type {
- t := parseNullType(l)
- if l.Peek() == '!' {
- l.ConsumeToken('!')
- return &NonNull{OfType: t}
- }
- return t
-}
-
-func parseNullType(l *Lexer) Type {
- if l.Peek() == '[' {
- l.ConsumeToken('[')
- ofType := ParseType(l)
- l.ConsumeToken(']')
- return &List{OfType: ofType}
- }
-
- return &TypeName{Ident: l.ConsumeIdentWithLoc()}
-}
-
-type Resolver func(name string) Type
-
-func ResolveType(t Type, resolver Resolver) (Type, *errors.QueryError) {
- switch t := t.(type) {
- case *List:
- ofType, err := ResolveType(t.OfType, resolver)
- if err != nil {
- return nil, err
- }
- return &List{OfType: ofType}, nil
- case *NonNull:
- ofType, err := ResolveType(t.OfType, resolver)
- if err != nil {
- return nil, err
- }
- return &NonNull{OfType: ofType}, nil
- case *TypeName:
- refT := resolver(t.Name)
- if refT == nil {
- err := errors.Errorf("Unknown type %q.", t.Name)
- err.Rule = "KnownTypeNames"
- err.Locations = []errors.Location{t.Loc}
- return nil, err
- }
- return refT, nil
- default:
- return t, nil
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/values.go b/vendor/github.com/vektah/gqlgen/neelance/common/values.go
deleted file mode 100644
index 09338da8..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/values.go
+++ /dev/null
@@ -1,77 +0,0 @@
-package common
-
-import (
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type InputValue struct {
- Name Ident
- Type Type
- Default Literal
- Desc string
- Loc errors.Location
- TypeLoc errors.Location
-}
-
-type InputValueList []*InputValue
-
-func (l InputValueList) Get(name string) *InputValue {
- for _, v := range l {
- if v.Name.Name == name {
- return v
- }
- }
- return nil
-}
-
-func ParseInputValue(l *Lexer) *InputValue {
- p := &InputValue{}
- p.Loc = l.Location()
- p.Desc = l.DescComment()
- p.Name = l.ConsumeIdentWithLoc()
- l.ConsumeToken(':')
- p.TypeLoc = l.Location()
- p.Type = ParseType(l)
- if l.Peek() == '=' {
- l.ConsumeToken('=')
- p.Default = ParseLiteral(l, true)
- }
- return p
-}
-
-type Argument struct {
- Name Ident
- Value Literal
-}
-
-type ArgumentList []Argument
-
-func (l ArgumentList) Get(name string) (Literal, bool) {
- for _, arg := range l {
- if arg.Name.Name == name {
- return arg.Value, true
- }
- }
- return nil, false
-}
-
-func (l ArgumentList) MustGet(name string) Literal {
- value, ok := l.Get(name)
- if !ok {
- panic("argument not found")
- }
- return value
-}
-
-func ParseArguments(l *Lexer) ArgumentList {
- var args ArgumentList
- l.ConsumeToken('(')
- for l.Peek() != ')' {
- name := l.ConsumeIdentWithLoc()
- l.ConsumeToken(':')
- value := ParseLiteral(l, false)
- args = append(args, Argument{Name: name, Value: value})
- }
- l.ConsumeToken(')')
- return args
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/errors/errors.go b/vendor/github.com/vektah/gqlgen/neelance/errors/errors.go
deleted file mode 100644
index fdfa6202..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/errors/errors.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package errors
-
-import (
- "fmt"
-)
-
-type QueryError struct {
- Message string `json:"message"`
- Locations []Location `json:"locations,omitempty"`
- Path []interface{} `json:"path,omitempty"`
- Rule string `json:"-"`
- ResolverError error `json:"-"`
-}
-
-type Location struct {
- Line int `json:"line"`
- Column int `json:"column"`
-}
-
-func (a Location) Before(b Location) bool {
- return a.Line < b.Line || (a.Line == b.Line && a.Column < b.Column)
-}
-
-func Errorf(format string, a ...interface{}) *QueryError {
- return &QueryError{
- Message: fmt.Sprintf(format, a...),
- }
-}
-
-func (err *QueryError) Error() string {
- if err == nil {
- return "<nil>"
- }
- str := fmt.Sprintf("graphql: %s", err.Message)
- for _, loc := range err.Locations {
- str += fmt.Sprintf(" (line %d, column %d)", loc.Line, loc.Column)
- }
- return str
-}
-
-var _ error = &QueryError{}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/introspection/introspection.go b/vendor/github.com/vektah/gqlgen/neelance/introspection/introspection.go
deleted file mode 100644
index 5e354c9a..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/introspection/introspection.go
+++ /dev/null
@@ -1,313 +0,0 @@
-package introspection
-
-import (
- "sort"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-type Schema struct {
- schema *schema.Schema
-}
-
-// WrapSchema is only used internally.
-func WrapSchema(schema *schema.Schema) *Schema {
- return &Schema{schema}
-}
-
-func (r *Schema) Types() []Type {
- var names []string
- for name := range r.schema.Types {
- names = append(names, name)
- }
- sort.Strings(names)
-
- l := make([]Type, len(names))
- for i, name := range names {
- l[i] = Type{r.schema.Types[name]}
- }
- return l
-}
-
-func (r *Schema) Directives() []Directive {
- var names []string
- for name := range r.schema.Directives {
- names = append(names, name)
- }
- sort.Strings(names)
-
- l := make([]Directive, len(names))
- for i, name := range names {
- l[i] = Directive{r.schema.Directives[name]}
- }
- return l
-}
-
-func (r *Schema) QueryType() Type {
- t, ok := r.schema.EntryPoints["query"]
- if !ok {
- return Type{}
- }
- return Type{t}
-}
-
-func (r *Schema) MutationType() *Type {
- t, ok := r.schema.EntryPoints["mutation"]
- if !ok {
- return nil
- }
- return &Type{t}
-}
-
-func (r *Schema) SubscriptionType() *Type {
- t, ok := r.schema.EntryPoints["subscription"]
- if !ok {
- return nil
- }
- return &Type{t}
-}
-
-type Type struct {
- typ common.Type
-}
-
-// WrapType is only used internally.
-func WrapType(typ common.Type) *Type {
- return &Type{typ}
-}
-
-func (r *Type) Kind() string {
- return r.typ.Kind()
-}
-
-func (r *Type) Name() *string {
- if named, ok := r.typ.(schema.NamedType); ok {
- name := named.TypeName()
- return &name
- }
- return nil
-}
-
-func (r *Type) Description() *string {
- if named, ok := r.typ.(schema.NamedType); ok {
- desc := named.Description()
- if desc == "" {
- return nil
- }
- return &desc
- }
- return nil
-}
-
-func (r *Type) Fields(includeDeprecated bool) []Field {
- var fields schema.FieldList
- switch t := r.typ.(type) {
- case *schema.Object:
- fields = t.Fields
- case *schema.Interface:
- fields = t.Fields
- default:
- return nil
- }
-
- var l []Field
- for _, f := range fields {
- if d := f.Directives.Get("deprecated"); d == nil || includeDeprecated {
- l = append(l, Field{f})
- }
- }
- return l
-}
-
-func (r *Type) Interfaces() []Type {
- t, ok := r.typ.(*schema.Object)
- if !ok {
- return nil
- }
-
- l := make([]Type, len(t.Interfaces))
- for i, intf := range t.Interfaces {
- l[i] = Type{intf}
- }
- return l
-}
-
-func (r *Type) PossibleTypes() []Type {
- var possibleTypes []*schema.Object
- switch t := r.typ.(type) {
- case *schema.Interface:
- possibleTypes = t.PossibleTypes
- case *schema.Union:
- possibleTypes = t.PossibleTypes
- default:
- return nil
- }
-
- l := make([]Type, len(possibleTypes))
- for i, intf := range possibleTypes {
- l[i] = Type{intf}
- }
- return l
-}
-
-func (r *Type) EnumValues(includeDeprecated bool) []EnumValue {
- t, ok := r.typ.(*schema.Enum)
- if !ok {
- return nil
- }
-
- var l []EnumValue
- for _, v := range t.Values {
- if d := v.Directives.Get("deprecated"); d == nil || includeDeprecated {
- l = append(l, EnumValue{v})
- }
- }
- return l
-}
-
-func (r *Type) InputFields() []InputValue {
- t, ok := r.typ.(*schema.InputObject)
- if !ok {
- return nil
- }
-
- l := make([]InputValue, len(t.Values))
- for i, v := range t.Values {
- l[i] = InputValue{v}
- }
- return l
-}
-
-func (r *Type) OfType() *Type {
- switch t := r.typ.(type) {
- case *common.List:
- return &Type{t.OfType}
- case *common.NonNull:
- return &Type{t.OfType}
- default:
- return nil
- }
-}
-
-type Field struct {
- field *schema.Field
-}
-
-func (r *Field) Name() string {
- return r.field.Name
-}
-
-func (r *Field) Description() *string {
- if r.field.Desc == "" {
- return nil
- }
- return &r.field.Desc
-}
-
-func (r *Field) Args() []InputValue {
- l := make([]InputValue, len(r.field.Args))
- for i, v := range r.field.Args {
- l[i] = InputValue{v}
- }
- return l
-}
-
-func (r *Field) Type() Type {
- return Type{r.field.Type}
-}
-
-func (r *Field) IsDeprecated() bool {
- return r.field.Directives.Get("deprecated") != nil
-}
-
-func (r *Field) DeprecationReason() *string {
- d := r.field.Directives.Get("deprecated")
- if d == nil {
- return nil
- }
- reason := d.Args.MustGet("reason").Value(nil).(string)
- return &reason
-}
-
-type InputValue struct {
- value *common.InputValue
-}
-
-func (r *InputValue) Name() string {
- return r.value.Name.Name
-}
-
-func (r *InputValue) Description() *string {
- if r.value.Desc == "" {
- return nil
- }
- return &r.value.Desc
-}
-
-func (r *InputValue) Type() Type {
- return Type{r.value.Type}
-}
-
-func (r *InputValue) DefaultValue() *string {
- if r.value.Default == nil {
- return nil
- }
- s := r.value.Default.String()
- return &s
-}
-
-type EnumValue struct {
- value *schema.EnumValue
-}
-
-func (r *EnumValue) Name() string {
- return r.value.Name
-}
-
-func (r *EnumValue) Description() *string {
- if r.value.Desc == "" {
- return nil
- }
- return &r.value.Desc
-}
-
-func (r *EnumValue) IsDeprecated() bool {
- return r.value.Directives.Get("deprecated") != nil
-}
-
-func (r *EnumValue) DeprecationReason() *string {
- d := r.value.Directives.Get("deprecated")
- if d == nil {
- return nil
- }
- reason := d.Args.MustGet("reason").Value(nil).(string)
- return &reason
-}
-
-type Directive struct {
- directive *schema.DirectiveDecl
-}
-
-func (r *Directive) Name() string {
- return r.directive.Name
-}
-
-func (r *Directive) Description() *string {
- if r.directive.Desc == "" {
- return nil
- }
- return &r.directive.Desc
-}
-
-func (r *Directive) Locations() []string {
- return r.directive.Locs
-}
-
-func (r *Directive) Args() []InputValue {
- l := make([]InputValue, len(r.directive.Args))
- for i, v := range r.directive.Args {
- l[i] = InputValue{v}
- }
- return l
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/introspection/query.go b/vendor/github.com/vektah/gqlgen/neelance/introspection/query.go
deleted file mode 100644
index b1e4fbc6..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/introspection/query.go
+++ /dev/null
@@ -1,104 +0,0 @@
-package introspection
-
-// Query is the query generated by graphiql to determine type information
-const Query = `
-query IntrospectionQuery {
- __schema {
- queryType {
- name
- }
- mutationType {
- name
- }
- subscriptionType {
- name
- }
- types {
- ...FullType
- }
- directives {
- name
- description
- locations
- args {
- ...InputValue
- }
- }
- }
-}
-
-fragment FullType on __Type {
- kind
- name
- description
- fields(includeDeprecated: true) {
- name
- description
- args {
- ...InputValue
- }
- type {
- ...TypeRef
- }
- isDeprecated
- deprecationReason
- }
- inputFields {
- ...InputValue
- }
- interfaces {
- ...TypeRef
- }
- enumValues(includeDeprecated: true) {
- name
- description
- isDeprecated
- deprecationReason
- }
- possibleTypes {
- ...TypeRef
- }
-}
-
-fragment InputValue on __InputValue {
- name
- description
- type {
- ...TypeRef
- }
- defaultValue
-}
-
-fragment TypeRef on __Type {
- kind
- name
- ofType {
- kind
- name
- ofType {
- kind
- name
- ofType {
- kind
- name
- ofType {
- kind
- name
- ofType {
- kind
- name
- ofType {
- kind
- name
- ofType {
- kind
- name
- }
- }
- }
- }
- }
- }
- }
-}
-`
diff --git a/vendor/github.com/vektah/gqlgen/neelance/query/query.go b/vendor/github.com/vektah/gqlgen/neelance/query/query.go
deleted file mode 100644
index b6f35354..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/query/query.go
+++ /dev/null
@@ -1,261 +0,0 @@
-package query
-
-import (
- "fmt"
- "strings"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type Document struct {
- Operations OperationList
- Fragments FragmentList
-}
-
-type OperationList []*Operation
-
-func (l OperationList) Get(name string) *Operation {
- for _, f := range l {
- if f.Name.Name == name {
- return f
- }
- }
- return nil
-}
-
-type FragmentList []*FragmentDecl
-
-func (l FragmentList) Get(name string) *FragmentDecl {
- for _, f := range l {
- if f.Name.Name == name {
- return f
- }
- }
- return nil
-}
-
-type Operation struct {
- Type OperationType
- Name common.Ident
- Vars common.InputValueList
- Selections []Selection
- Directives common.DirectiveList
- Loc errors.Location
-}
-
-type OperationType string
-
-const (
- Query OperationType = "QUERY"
- Mutation = "MUTATION"
- Subscription = "SUBSCRIPTION"
-)
-
-type Fragment struct {
- On common.TypeName
- Selections []Selection
-}
-
-type FragmentDecl struct {
- Fragment
- Name common.Ident
- Directives common.DirectiveList
- Loc errors.Location
-}
-
-type Selection interface {
- isSelection()
-}
-
-type Field struct {
- Alias common.Ident
- Name common.Ident
- Arguments common.ArgumentList
- Directives common.DirectiveList
- Selections []Selection
- SelectionSetLoc errors.Location
-}
-
-type InlineFragment struct {
- Fragment
- Directives common.DirectiveList
- Loc errors.Location
-}
-
-type FragmentSpread struct {
- Name common.Ident
- Directives common.DirectiveList
- Loc errors.Location
-}
-
-func (Field) isSelection() {}
-func (InlineFragment) isSelection() {}
-func (FragmentSpread) isSelection() {}
-
-func Parse(queryString string) (*Document, *errors.QueryError) {
- sc := &scanner.Scanner{
- Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
- }
- sc.Init(strings.NewReader(queryString))
-
- l := common.New(sc)
- var doc *Document
- err := l.CatchSyntaxError(func() {
- doc = parseDocument(l)
- })
- if err != nil {
- return nil, err
- }
-
- return doc, nil
-}
-
-func parseDocument(l *common.Lexer) *Document {
- d := &Document{}
- for l.Peek() != scanner.EOF {
- if l.Peek() == '{' {
- op := &Operation{Type: Query, Loc: l.Location()}
- op.Selections = parseSelectionSet(l)
- d.Operations = append(d.Operations, op)
- continue
- }
-
- loc := l.Location()
- switch x := l.ConsumeIdent(); x {
- case "query":
- op := parseOperation(l, Query)
- op.Loc = loc
- d.Operations = append(d.Operations, op)
-
- case "mutation":
- d.Operations = append(d.Operations, parseOperation(l, Mutation))
-
- case "subscription":
- d.Operations = append(d.Operations, parseOperation(l, Subscription))
-
- case "fragment":
- frag := parseFragment(l)
- frag.Loc = loc
- d.Fragments = append(d.Fragments, frag)
-
- default:
- l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "fragment"`, x))
- }
- }
- return d
-}
-
-func parseOperation(l *common.Lexer, opType OperationType) *Operation {
- op := &Operation{Type: opType}
- op.Name.Loc = l.Location()
- if l.Peek() == scanner.Ident {
- op.Name = l.ConsumeIdentWithLoc()
- }
- op.Directives = common.ParseDirectives(l)
- if l.Peek() == '(' {
- l.ConsumeToken('(')
- for l.Peek() != ')' {
- loc := l.Location()
- l.ConsumeToken('$')
- iv := common.ParseInputValue(l)
- iv.Loc = loc
- op.Vars = append(op.Vars, iv)
- }
- l.ConsumeToken(')')
- }
- op.Selections = parseSelectionSet(l)
- return op
-}
-
-func parseFragment(l *common.Lexer) *FragmentDecl {
- f := &FragmentDecl{}
- f.Name = l.ConsumeIdentWithLoc()
- l.ConsumeKeyword("on")
- f.On = common.TypeName{Ident: l.ConsumeIdentWithLoc()}
- f.Directives = common.ParseDirectives(l)
- f.Selections = parseSelectionSet(l)
- return f
-}
-
-func parseSelectionSet(l *common.Lexer) []Selection {
- var sels []Selection
- l.ConsumeToken('{')
- for l.Peek() != '}' {
- sels = append(sels, parseSelection(l))
- }
- l.ConsumeToken('}')
- return sels
-}
-
-func parseSelection(l *common.Lexer) Selection {
- if l.Peek() == '.' {
- return parseSpread(l)
- }
- return parseField(l)
-}
-
-func parseField(l *common.Lexer) *Field {
- f := &Field{}
- f.Alias = l.ConsumeIdentWithLoc()
- f.Name = f.Alias
- if l.Peek() == ':' {
- l.ConsumeToken(':')
- f.Name = l.ConsumeIdentWithLoc()
- }
- if l.Peek() == '(' {
- f.Arguments = common.ParseArguments(l)
- }
- f.Directives = common.ParseDirectives(l)
- if l.Peek() == '{' {
- f.SelectionSetLoc = l.Location()
- f.Selections = parseSelectionSet(l)
- }
- return f
-}
-
-func parseSpread(l *common.Lexer) Selection {
- loc := l.Location()
- l.ConsumeToken('.')
- l.ConsumeToken('.')
- l.ConsumeToken('.')
-
- f := &InlineFragment{Loc: loc}
- if l.Peek() == scanner.Ident {
- ident := l.ConsumeIdentWithLoc()
- if ident.Name != "on" {
- fs := &FragmentSpread{
- Name: ident,
- Loc: loc,
- }
- fs.Directives = common.ParseDirectives(l)
- return fs
- }
- f.On = common.TypeName{Ident: l.ConsumeIdentWithLoc()}
- }
- f.Directives = common.ParseDirectives(l)
- f.Selections = parseSelectionSet(l)
- return f
-}
-
-func (d *Document) GetOperation(operationName string) (*Operation, error) {
- if len(d.Operations) == 0 {
- return nil, fmt.Errorf("no operations in query document")
- }
-
- if operationName == "" {
- if len(d.Operations) > 1 {
- return nil, fmt.Errorf("more than one operation in query document and no operation name given")
- }
- for _, op := range d.Operations {
- return op, nil // return the one and only operation
- }
- }
-
- op := d.Operations.Get(operationName)
- if op == nil {
- return nil, fmt.Errorf("no operation with name %q", operationName)
- }
- return op, nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/schema/meta.go b/vendor/github.com/vektah/gqlgen/neelance/schema/meta.go
deleted file mode 100644
index efdcaa2c..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/schema/meta.go
+++ /dev/null
@@ -1,193 +0,0 @@
-package schema
-
-var Meta *Schema
-
-func init() {
- Meta = &Schema{} // bootstrap
- Meta = New()
- if err := Meta.Parse(metaSrc); err != nil {
- panic(err)
- }
-}
-
-var metaSrc = `
- # The ` + "`" + `Int` + "`" + ` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
- scalar Int
-
- # The ` + "`" + `Float` + "`" + ` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
- scalar Float
-
- # The ` + "`" + `String` + "`" + ` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
- scalar String
-
- # The ` + "`" + `Boolean` + "`" + ` scalar type represents ` + "`" + `true` + "`" + ` or ` + "`" + `false` + "`" + `.
- scalar Boolean
-
- # The ` + "`" + `ID` + "`" + ` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as ` + "`" + `"4"` + "`" + `) or integer (such as ` + "`" + `4` + "`" + `) input value will be accepted as an ID.
- scalar ID
-
- # The ` + "`" + `Map` + "`" + ` scalar type is a simple json object
- scalar Map
-
- # Directs the executor to include this field or fragment only when the ` + "`" + `if` + "`" + ` argument is true.
- directive @include(
- # Included when true.
- if: Boolean!
- ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
-
- # Directs the executor to skip this field or fragment when the ` + "`" + `if` + "`" + ` argument is true.
- directive @skip(
- # Skipped when true.
- if: Boolean!
- ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
-
- # Marks an element of a GraphQL schema as no longer supported.
- directive @deprecated(
- # Explains why this element was deprecated, usually also including a suggestion
- # for how to access supported similar data. Formatted in
- # [Markdown](https://daringfireball.net/projects/markdown/).
- reason: String = "No longer supported"
- ) on FIELD_DEFINITION | ENUM_VALUE
-
- # A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.
- #
- # In some cases, you need to provide options to alter GraphQL's execution behavior
- # in ways field arguments will not suffice, such as conditionally including or
- # skipping a field. Directives provide this by describing additional information
- # to the executor.
- type __Directive {
- name: String!
- description: String
- locations: [__DirectiveLocation!]!
- args: [__InputValue!]!
- }
-
- # A Directive can be adjacent to many parts of the GraphQL language, a
- # __DirectiveLocation describes one such possible adjacencies.
- enum __DirectiveLocation {
- # Location adjacent to a query operation.
- QUERY
- # Location adjacent to a mutation operation.
- MUTATION
- # Location adjacent to a subscription operation.
- SUBSCRIPTION
- # Location adjacent to a field.
- FIELD
- # Location adjacent to a fragment definition.
- FRAGMENT_DEFINITION
- # Location adjacent to a fragment spread.
- FRAGMENT_SPREAD
- # Location adjacent to an inline fragment.
- INLINE_FRAGMENT
- # Location adjacent to a schema definition.
- SCHEMA
- # Location adjacent to a scalar definition.
- SCALAR
- # Location adjacent to an object type definition.
- OBJECT
- # Location adjacent to a field definition.
- FIELD_DEFINITION
- # Location adjacent to an argument definition.
- ARGUMENT_DEFINITION
- # Location adjacent to an interface definition.
- INTERFACE
- # Location adjacent to a union definition.
- UNION
- # Location adjacent to an enum definition.
- ENUM
- # Location adjacent to an enum value definition.
- ENUM_VALUE
- # Location adjacent to an input object type definition.
- INPUT_OBJECT
- # Location adjacent to an input object field definition.
- INPUT_FIELD_DEFINITION
- }
-
- # One possible value for a given Enum. Enum values are unique values, not a
- # placeholder for a string or numeric value. However an Enum value is returned in
- # a JSON response as a string.
- type __EnumValue {
- name: String!
- description: String
- isDeprecated: Boolean!
- deprecationReason: String
- }
-
- # Object and Interface types are described by a list of Fields, each of which has
- # a name, potentially a list of arguments, and a return type.
- type __Field {
- name: String!
- description: String
- args: [__InputValue!]!
- type: __Type!
- isDeprecated: Boolean!
- deprecationReason: String
- }
-
- # Arguments provided to Fields or Directives and the input fields of an
- # InputObject are represented as Input Values which describe their type and
- # optionally a default value.
- type __InputValue {
- name: String!
- description: String
- type: __Type!
- # A GraphQL-formatted string representing the default value for this input value.
- defaultValue: String
- }
-
- # A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all
- # available types and directives on the server, as well as the entry points for
- # query, mutation, and subscription operations.
- type __Schema {
- # A list of all types supported by this server.
- types: [__Type!]!
- # The type that query operations will be rooted at.
- queryType: __Type!
- # If this server supports mutation, the type that mutation operations will be rooted at.
- mutationType: __Type
- # If this server support subscription, the type that subscription operations will be rooted at.
- subscriptionType: __Type
- # A list of all directives supported by this server.
- directives: [__Directive!]!
- }
-
- # The fundamental unit of any GraphQL Schema is the type. There are many kinds of
- # types in GraphQL as represented by the ` + "`" + `__TypeKind` + "`" + ` enum.
- #
- # Depending on the kind of a type, certain fields describe information about that
- # type. Scalar types provide no information beyond a name and description, while
- # Enum types provide their values. Object and Interface types provide the fields
- # they describe. Abstract types, Union and Interface, provide the Object types
- # possible at runtime. List and NonNull types compose other types.
- type __Type {
- kind: __TypeKind!
- name: String
- description: String
- fields(includeDeprecated: Boolean = false): [__Field!]
- interfaces: [__Type!]
- possibleTypes: [__Type!]
- enumValues(includeDeprecated: Boolean = false): [__EnumValue!]
- inputFields: [__InputValue!]
- ofType: __Type
- }
-
- # An enum describing what kind of type a given ` + "`" + `__Type` + "`" + ` is.
- enum __TypeKind {
- # Indicates this type is a scalar.
- SCALAR
- # Indicates this type is an object. ` + "`" + `fields` + "`" + ` and ` + "`" + `interfaces` + "`" + ` are valid fields.
- OBJECT
- # Indicates this type is an interface. ` + "`" + `fields` + "`" + ` and ` + "`" + `possibleTypes` + "`" + ` are valid fields.
- INTERFACE
- # Indicates this type is a union. ` + "`" + `possibleTypes` + "`" + ` is a valid field.
- UNION
- # Indicates this type is an enum. ` + "`" + `enumValues` + "`" + ` is a valid field.
- ENUM
- # Indicates this type is an input object. ` + "`" + `inputFields` + "`" + ` is a valid field.
- INPUT_OBJECT
- # Indicates this type is a list. ` + "`" + `ofType` + "`" + ` is a valid field.
- LIST
- # Indicates this type is a non-null. ` + "`" + `ofType` + "`" + ` is a valid field.
- NON_NULL
- }
-`
diff --git a/vendor/github.com/vektah/gqlgen/neelance/schema/schema.go b/vendor/github.com/vektah/gqlgen/neelance/schema/schema.go
deleted file mode 100644
index 0b1317a5..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/schema/schema.go
+++ /dev/null
@@ -1,489 +0,0 @@
-package schema
-
-import (
- "fmt"
- "strings"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type Schema struct {
- EntryPoints map[string]NamedType
- Types map[string]NamedType
- Directives map[string]*DirectiveDecl
-
- entryPointNames map[string]string
- objects []*Object
- unions []*Union
- enums []*Enum
-}
-
-var defaultEntrypoints = map[string]string{
- "query": "Query",
- "mutation": "Mutation",
- "subscription": "Subscription",
-}
-
-func (s *Schema) Resolve(name string) common.Type {
- return s.Types[name]
-}
-
-type NamedType interface {
- common.Type
- TypeName() string
- Description() string
-}
-
-type Scalar struct {
- Name string
- Desc string
-}
-
-type Object struct {
- Name string
- Interfaces []*Interface
- Fields FieldList
- Desc string
-
- interfaceNames []string
-}
-
-type Interface struct {
- Name string
- PossibleTypes []*Object
- Fields FieldList
- Desc string
-}
-
-type Union struct {
- Name string
- PossibleTypes []*Object
- Desc string
-
- typeNames []string
-}
-
-type Enum struct {
- Name string
- Values []*EnumValue
- Desc string
-}
-
-type EnumValue struct {
- Name string
- Directives common.DirectiveList
- Desc string
-}
-
-type InputObject struct {
- Name string
- Desc string
- Values common.InputValueList
-}
-
-type FieldList []*Field
-
-func (l FieldList) Get(name string) *Field {
- for _, f := range l {
- if f.Name == name {
- return f
- }
- }
- return nil
-}
-
-func (l FieldList) Names() []string {
- names := make([]string, len(l))
- for i, f := range l {
- names[i] = f.Name
- }
- return names
-}
-
-type DirectiveDecl struct {
- Name string
- Desc string
- Locs []string
- Args common.InputValueList
-}
-
-func (*Scalar) Kind() string { return "SCALAR" }
-func (*Object) Kind() string { return "OBJECT" }
-func (*Interface) Kind() string { return "INTERFACE" }
-func (*Union) Kind() string { return "UNION" }
-func (*Enum) Kind() string { return "ENUM" }
-func (*InputObject) Kind() string { return "INPUT_OBJECT" }
-
-func (t *Scalar) String() string { return t.Name }
-func (t *Object) String() string { return t.Name }
-func (t *Interface) String() string { return t.Name }
-func (t *Union) String() string { return t.Name }
-func (t *Enum) String() string { return t.Name }
-func (t *InputObject) String() string { return t.Name }
-
-func (t *Scalar) TypeName() string { return t.Name }
-func (t *Object) TypeName() string { return t.Name }
-func (t *Interface) TypeName() string { return t.Name }
-func (t *Union) TypeName() string { return t.Name }
-func (t *Enum) TypeName() string { return t.Name }
-func (t *InputObject) TypeName() string { return t.Name }
-
-func (t *Scalar) Description() string { return t.Desc }
-func (t *Object) Description() string { return t.Desc }
-func (t *Interface) Description() string { return t.Desc }
-func (t *Union) Description() string { return t.Desc }
-func (t *Enum) Description() string { return t.Desc }
-func (t *InputObject) Description() string { return t.Desc }
-
-type Field struct {
- Name string
- Args common.InputValueList
- Type common.Type
- Directives common.DirectiveList
- Desc string
-}
-
-func MustParse(str string) *Schema {
- s := New()
- err := s.Parse(str)
- if err != nil {
- panic(err)
- }
- return s
-}
-
-func New() *Schema {
- s := &Schema{
- entryPointNames: make(map[string]string),
- Types: make(map[string]NamedType),
- Directives: make(map[string]*DirectiveDecl),
- }
- for n, t := range Meta.Types {
- s.Types[n] = t
- }
- for n, d := range Meta.Directives {
- s.Directives[n] = d
- }
- return s
-}
-
-func (s *Schema) Parse(schemaString string) error {
- sc := &scanner.Scanner{
- Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
- }
- sc.Init(strings.NewReader(schemaString))
-
- l := common.New(sc)
- err := l.CatchSyntaxError(func() {
- parseSchema(s, l)
- })
- if err != nil {
- return err
- }
-
- for _, t := range s.Types {
- if err := resolveNamedType(s, t); err != nil {
- return err
- }
- }
- for _, d := range s.Directives {
- for _, arg := range d.Args {
- t, err := common.ResolveType(arg.Type, s.Resolve)
- if err != nil {
- return err
- }
- arg.Type = t
- }
- }
-
- s.EntryPoints = make(map[string]NamedType)
- for key, name := range s.entryPointNames {
- t, ok := s.Types[name]
- if !ok {
- if !ok {
- return errors.Errorf("type %q not found", name)
- }
- }
- s.EntryPoints[key] = t
- }
-
- for entrypointName, typeName := range defaultEntrypoints {
- if _, ok := s.EntryPoints[entrypointName]; ok {
- continue
- }
-
- if _, ok := s.Types[typeName]; !ok {
- continue
- }
-
- s.EntryPoints[entrypointName] = s.Types[typeName]
- }
-
- for _, obj := range s.objects {
- obj.Interfaces = make([]*Interface, len(obj.interfaceNames))
- for i, intfName := range obj.interfaceNames {
- t, ok := s.Types[intfName]
- if !ok {
- return errors.Errorf("interface %q not found", intfName)
- }
- intf, ok := t.(*Interface)
- if !ok {
- return errors.Errorf("type %q is not an interface", intfName)
- }
- obj.Interfaces[i] = intf
- intf.PossibleTypes = append(intf.PossibleTypes, obj)
- }
- }
-
- for _, union := range s.unions {
- union.PossibleTypes = make([]*Object, len(union.typeNames))
- for i, name := range union.typeNames {
- t, ok := s.Types[name]
- if !ok {
- return errors.Errorf("object type %q not found", name)
- }
- obj, ok := t.(*Object)
- if !ok {
- return errors.Errorf("type %q is not an object", name)
- }
- union.PossibleTypes[i] = obj
- }
- }
-
- for _, enum := range s.enums {
- for _, value := range enum.Values {
- if err := resolveDirectives(s, value.Directives); err != nil {
- return err
- }
- }
- }
-
- return nil
-}
-
-func resolveNamedType(s *Schema, t NamedType) error {
- switch t := t.(type) {
- case *Object:
- for _, f := range t.Fields {
- if err := resolveField(s, f); err != nil {
- return err
- }
- }
- case *Interface:
- for _, f := range t.Fields {
- if err := resolveField(s, f); err != nil {
- return err
- }
- }
- case *InputObject:
- if err := resolveInputObject(s, t.Values); err != nil {
- return err
- }
- }
- return nil
-}
-
-func resolveField(s *Schema, f *Field) error {
- t, err := common.ResolveType(f.Type, s.Resolve)
- if err != nil {
- return err
- }
- f.Type = t
- if err := resolveDirectives(s, f.Directives); err != nil {
- return err
- }
- return resolveInputObject(s, f.Args)
-}
-
-func resolveDirectives(s *Schema, directives common.DirectiveList) error {
- for _, d := range directives {
- dirName := d.Name.Name
- dd, ok := s.Directives[dirName]
- if !ok {
- return errors.Errorf("directive %q not found", dirName)
- }
- for _, arg := range d.Args {
- if dd.Args.Get(arg.Name.Name) == nil {
- return errors.Errorf("invalid argument %q for directive %q", arg.Name.Name, dirName)
- }
- }
- for _, arg := range dd.Args {
- if _, ok := d.Args.Get(arg.Name.Name); !ok {
- d.Args = append(d.Args, common.Argument{Name: arg.Name, Value: arg.Default})
- }
- }
- }
- return nil
-}
-
-func resolveInputObject(s *Schema, values common.InputValueList) error {
- for _, v := range values {
- t, err := common.ResolveType(v.Type, s.Resolve)
- if err != nil {
- return err
- }
- v.Type = t
- }
- return nil
-}
-
-func parseSchema(s *Schema, l *common.Lexer) {
- for l.Peek() != scanner.EOF {
- desc := l.DescComment()
- switch x := l.ConsumeIdent(); x {
- case "schema":
- l.ConsumeToken('{')
- for l.Peek() != '}' {
- name := l.ConsumeIdent()
- l.ConsumeToken(':')
- typ := l.ConsumeIdent()
- s.entryPointNames[name] = typ
- }
- l.ConsumeToken('}')
- case "type":
- obj := parseObjectDecl(l)
- obj.Desc = desc
- s.Types[obj.Name] = obj
- s.objects = append(s.objects, obj)
- case "interface":
- intf := parseInterfaceDecl(l)
- intf.Desc = desc
- s.Types[intf.Name] = intf
- case "union":
- union := parseUnionDecl(l)
- union.Desc = desc
- s.Types[union.Name] = union
- s.unions = append(s.unions, union)
- case "enum":
- enum := parseEnumDecl(l)
- enum.Desc = desc
- s.Types[enum.Name] = enum
- s.enums = append(s.enums, enum)
- case "input":
- input := parseInputDecl(l)
- input.Desc = desc
- s.Types[input.Name] = input
- case "scalar":
- name := l.ConsumeIdent()
- s.Types[name] = &Scalar{Name: name, Desc: desc}
- case "directive":
- directive := parseDirectiveDecl(l)
- directive.Desc = desc
- s.Directives[directive.Name] = directive
- default:
- l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "schema", "type", "enum", "interface", "union", "input", "scalar" or "directive"`, x))
- }
- }
-}
-
-func parseObjectDecl(l *common.Lexer) *Object {
- o := &Object{}
- o.Name = l.ConsumeIdent()
- if l.Peek() == scanner.Ident {
- l.ConsumeKeyword("implements")
- for {
- o.interfaceNames = append(o.interfaceNames, l.ConsumeIdent())
- if l.Peek() == '{' {
- break
- }
- }
- }
- l.ConsumeToken('{')
- o.Fields = parseFields(l)
- l.ConsumeToken('}')
- return o
-}
-
-func parseInterfaceDecl(l *common.Lexer) *Interface {
- i := &Interface{}
- i.Name = l.ConsumeIdent()
- l.ConsumeToken('{')
- i.Fields = parseFields(l)
- l.ConsumeToken('}')
- return i
-}
-
-func parseUnionDecl(l *common.Lexer) *Union {
- union := &Union{}
- union.Name = l.ConsumeIdent()
- l.ConsumeToken('=')
- union.typeNames = []string{l.ConsumeIdent()}
- for l.Peek() == '|' {
- l.ConsumeToken('|')
- union.typeNames = append(union.typeNames, l.ConsumeIdent())
- }
- return union
-}
-
-func parseInputDecl(l *common.Lexer) *InputObject {
- i := &InputObject{}
- i.Name = l.ConsumeIdent()
- l.ConsumeToken('{')
- for l.Peek() != '}' {
- i.Values = append(i.Values, common.ParseInputValue(l))
- }
- l.ConsumeToken('}')
- return i
-}
-
-func parseEnumDecl(l *common.Lexer) *Enum {
- enum := &Enum{}
- enum.Name = l.ConsumeIdent()
- l.ConsumeToken('{')
- for l.Peek() != '}' {
- v := &EnumValue{}
- v.Desc = l.DescComment()
- v.Name = l.ConsumeIdent()
- v.Directives = common.ParseDirectives(l)
- enum.Values = append(enum.Values, v)
- }
- l.ConsumeToken('}')
- return enum
-}
-
-func parseDirectiveDecl(l *common.Lexer) *DirectiveDecl {
- d := &DirectiveDecl{}
- l.ConsumeToken('@')
- d.Name = l.ConsumeIdent()
- if l.Peek() == '(' {
- l.ConsumeToken('(')
- for l.Peek() != ')' {
- v := common.ParseInputValue(l)
- d.Args = append(d.Args, v)
- }
- l.ConsumeToken(')')
- }
- l.ConsumeKeyword("on")
- for {
- loc := l.ConsumeIdent()
- d.Locs = append(d.Locs, loc)
- if l.Peek() != '|' {
- break
- }
- l.ConsumeToken('|')
- }
- return d
-}
-
-func parseFields(l *common.Lexer) FieldList {
- var fields FieldList
- for l.Peek() != '}' {
- f := &Field{}
- f.Desc = l.DescComment()
- f.Name = l.ConsumeIdent()
- if l.Peek() == '(' {
- l.ConsumeToken('(')
- for l.Peek() != ')' {
- f.Args = append(f.Args, common.ParseInputValue(l))
- }
- l.ConsumeToken(')')
- }
- l.ConsumeToken(':')
- f.Type = common.ParseType(l)
- f.Directives = common.ParseDirectives(l)
- fields = append(fields, f)
- }
- return fields
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/validation/suggestion.go b/vendor/github.com/vektah/gqlgen/neelance/validation/suggestion.go
deleted file mode 100644
index 9702b5f5..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/validation/suggestion.go
+++ /dev/null
@@ -1,71 +0,0 @@
-package validation
-
-import (
- "fmt"
- "sort"
- "strconv"
- "strings"
-)
-
-func makeSuggestion(prefix string, options []string, input string) string {
- var selected []string
- distances := make(map[string]int)
- for _, opt := range options {
- distance := levenshteinDistance(input, opt)
- threshold := max(len(input)/2, max(len(opt)/2, 1))
- if distance < threshold {
- selected = append(selected, opt)
- distances[opt] = distance
- }
- }
-
- if len(selected) == 0 {
- return ""
- }
- sort.Slice(selected, func(i, j int) bool {
- return distances[selected[i]] < distances[selected[j]]
- })
-
- parts := make([]string, len(selected))
- for i, opt := range selected {
- parts[i] = strconv.Quote(opt)
- }
- if len(parts) > 1 {
- parts[len(parts)-1] = "or " + parts[len(parts)-1]
- }
- return fmt.Sprintf(" %s %s?", prefix, strings.Join(parts, ", "))
-}
-
-func levenshteinDistance(s1, s2 string) int {
- column := make([]int, len(s1)+1)
- for y := range s1 {
- column[y+1] = y + 1
- }
- for x, rx := range s2 {
- column[0] = x + 1
- lastdiag := x
- for y, ry := range s1 {
- olddiag := column[y+1]
- if rx != ry {
- lastdiag++
- }
- column[y+1] = min(column[y+1]+1, min(column[y]+1, lastdiag))
- lastdiag = olddiag
- }
- }
- return column[len(s1)]
-}
-
-func min(a, b int) int {
- if a < b {
- return a
- }
- return b
-}
-
-func max(a, b int) int {
- if a > b {
- return a
- }
- return b
-}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/validation/validation.go b/vendor/github.com/vektah/gqlgen/neelance/validation/validation.go
deleted file mode 100644
index 28124310..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/validation/validation.go
+++ /dev/null
@@ -1,861 +0,0 @@
-package validation
-
-import (
- "fmt"
- "math"
- "reflect"
- "strconv"
- "strings"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/errors"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-type varSet map[*common.InputValue]struct{}
-
-type selectionPair struct{ a, b query.Selection }
-
-type fieldInfo struct {
- sf *schema.Field
- parent schema.NamedType
-}
-
-type context struct {
- schema *schema.Schema
- doc *query.Document
- errs []*errors.QueryError
- opErrs map[*query.Operation][]*errors.QueryError
- usedVars map[*query.Operation]varSet
- fieldMap map[*query.Field]fieldInfo
- overlapValidated map[selectionPair]struct{}
-}
-
-func (c *context) addErr(loc errors.Location, rule string, format string, a ...interface{}) {
- c.addErrMultiLoc([]errors.Location{loc}, rule, format, a...)
-}
-
-func (c *context) addErrMultiLoc(locs []errors.Location, rule string, format string, a ...interface{}) {
- c.errs = append(c.errs, &errors.QueryError{
- Message: fmt.Sprintf(format, a...),
- Locations: locs,
- Rule: rule,
- })
-}
-
-type opContext struct {
- *context
- ops []*query.Operation
-}
-
-func Validate(s *schema.Schema, doc *query.Document) []*errors.QueryError {
- c := &context{
- schema: s,
- doc: doc,
- opErrs: make(map[*query.Operation][]*errors.QueryError),
- usedVars: make(map[*query.Operation]varSet),
- fieldMap: make(map[*query.Field]fieldInfo),
- overlapValidated: make(map[selectionPair]struct{}),
- }
-
- opNames := make(nameSet)
- fragUsedBy := make(map[*query.FragmentDecl][]*query.Operation)
- for _, op := range doc.Operations {
- c.usedVars[op] = make(varSet)
- opc := &opContext{c, []*query.Operation{op}}
-
- if op.Name.Name == "" && len(doc.Operations) != 1 {
- c.addErr(op.Loc, "LoneAnonymousOperation", "This anonymous operation must be the only defined operation.")
- }
- if op.Name.Name != "" {
- validateName(c, opNames, op.Name, "UniqueOperationNames", "operation")
- }
-
- validateDirectives(opc, string(op.Type), op.Directives)
-
- varNames := make(nameSet)
- for _, v := range op.Vars {
- validateName(c, varNames, v.Name, "UniqueVariableNames", "variable")
-
- t := resolveType(c, v.Type)
- if !canBeInput(t) {
- c.addErr(v.TypeLoc, "VariablesAreInputTypes", "Variable %q cannot be non-input type %q.", "$"+v.Name.Name, t)
- }
-
- if v.Default != nil {
- validateLiteral(opc, v.Default)
-
- if t != nil {
- if nn, ok := t.(*common.NonNull); ok {
- c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q is required and will not use the default value. Perhaps you meant to use type %q.", "$"+v.Name.Name, t, nn.OfType)
- }
-
- if ok, reason := validateValueType(opc, v.Default, t); !ok {
- c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q has invalid default value %s.\n%s", "$"+v.Name.Name, t, v.Default, reason)
- }
- }
- }
- }
-
- var entryPoint schema.NamedType
- switch op.Type {
- case query.Query:
- entryPoint = s.EntryPoints["query"]
- case query.Mutation:
- entryPoint = s.EntryPoints["mutation"]
- case query.Subscription:
- entryPoint = s.EntryPoints["subscription"]
- default:
- panic("unreachable")
- }
-
- validateSelectionSet(opc, op.Selections, entryPoint)
-
- fragUsed := make(map[*query.FragmentDecl]struct{})
- markUsedFragments(c, op.Selections, fragUsed)
- for frag := range fragUsed {
- fragUsedBy[frag] = append(fragUsedBy[frag], op)
- }
- }
-
- fragNames := make(nameSet)
- fragVisited := make(map[*query.FragmentDecl]struct{})
- for _, frag := range doc.Fragments {
- opc := &opContext{c, fragUsedBy[frag]}
-
- validateName(c, fragNames, frag.Name, "UniqueFragmentNames", "fragment")
- validateDirectives(opc, "FRAGMENT_DEFINITION", frag.Directives)
-
- t := unwrapType(resolveType(c, &frag.On))
- // continue even if t is nil
- if t != nil && !canBeFragment(t) {
- c.addErr(frag.On.Loc, "FragmentsOnCompositeTypes", "Fragment %q cannot condition on non composite type %q.", frag.Name.Name, t)
- continue
- }
-
- validateSelectionSet(opc, frag.Selections, t)
-
- if _, ok := fragVisited[frag]; !ok {
- detectFragmentCycle(c, frag.Selections, fragVisited, nil, map[string]int{frag.Name.Name: 0})
- }
- }
-
- for _, frag := range doc.Fragments {
- if len(fragUsedBy[frag]) == 0 {
- c.addErr(frag.Loc, "NoUnusedFragments", "Fragment %q is never used.", frag.Name.Name)
- }
- }
-
- for _, op := range doc.Operations {
- c.errs = append(c.errs, c.opErrs[op]...)
-
- opUsedVars := c.usedVars[op]
- for _, v := range op.Vars {
- if _, ok := opUsedVars[v]; !ok {
- opSuffix := ""
- if op.Name.Name != "" {
- opSuffix = fmt.Sprintf(" in operation %q", op.Name.Name)
- }
- c.addErr(v.Loc, "NoUnusedVariables", "Variable %q is never used%s.", "$"+v.Name.Name, opSuffix)
- }
- }
- }
-
- return c.errs
-}
-
-func validateSelectionSet(c *opContext, sels []query.Selection, t schema.NamedType) {
- for _, sel := range sels {
- validateSelection(c, sel, t)
- }
-
- for i, a := range sels {
- for _, b := range sels[i+1:] {
- c.validateOverlap(a, b, nil, nil)
- }
- }
-}
-
-func validateSelection(c *opContext, sel query.Selection, t schema.NamedType) {
- switch sel := sel.(type) {
- case *query.Field:
- validateDirectives(c, "FIELD", sel.Directives)
-
- fieldName := sel.Name.Name
- var f *schema.Field
- switch fieldName {
- case "__typename":
- f = &schema.Field{
- Name: "__typename",
- Type: c.schema.Types["String"],
- }
- case "__schema":
- f = &schema.Field{
- Name: "__schema",
- Type: c.schema.Types["__Schema"],
- }
- case "__type":
- f = &schema.Field{
- Name: "__type",
- Args: common.InputValueList{
- &common.InputValue{
- Name: common.Ident{Name: "name"},
- Type: &common.NonNull{OfType: c.schema.Types["String"]},
- },
- },
- Type: c.schema.Types["__Type"],
- }
- default:
- f = fields(t).Get(fieldName)
- if f == nil && t != nil {
- suggestion := makeSuggestion("Did you mean", fields(t).Names(), fieldName)
- c.addErr(sel.Alias.Loc, "FieldsOnCorrectType", "Cannot query field %q on type %q.%s", fieldName, t, suggestion)
- }
- }
- c.fieldMap[sel] = fieldInfo{sf: f, parent: t}
-
- validateArgumentLiterals(c, sel.Arguments)
- if f != nil {
- validateArgumentTypes(c, sel.Arguments, f.Args, sel.Alias.Loc,
- func() string { return fmt.Sprintf("field %q of type %q", fieldName, t) },
- func() string { return fmt.Sprintf("Field %q", fieldName) },
- )
- }
-
- var ft common.Type
- if f != nil {
- ft = f.Type
- sf := hasSubfields(ft)
- if sf && sel.Selections == nil {
- c.addErr(sel.Alias.Loc, "ScalarLeafs", "Field %q of type %q must have a selection of subfields. Did you mean \"%s { ... }\"?", fieldName, ft, fieldName)
- }
- if !sf && sel.Selections != nil {
- c.addErr(sel.SelectionSetLoc, "ScalarLeafs", "Field %q must not have a selection since type %q has no subfields.", fieldName, ft)
- }
- }
- if sel.Selections != nil {
- validateSelectionSet(c, sel.Selections, unwrapType(ft))
- }
-
- case *query.InlineFragment:
- validateDirectives(c, "INLINE_FRAGMENT", sel.Directives)
- if sel.On.Name != "" {
- fragTyp := unwrapType(resolveType(c.context, &sel.On))
- if fragTyp != nil && !compatible(t, fragTyp) {
- c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment cannot be spread here as objects of type %q can never be of type %q.", t, fragTyp)
- }
- t = fragTyp
- // continue even if t is nil
- }
- if t != nil && !canBeFragment(t) {
- c.addErr(sel.On.Loc, "FragmentsOnCompositeTypes", "Fragment cannot condition on non composite type %q.", t)
- return
- }
- validateSelectionSet(c, sel.Selections, unwrapType(t))
-
- case *query.FragmentSpread:
- validateDirectives(c, "FRAGMENT_SPREAD", sel.Directives)
- frag := c.doc.Fragments.Get(sel.Name.Name)
- if frag == nil {
- c.addErr(sel.Name.Loc, "KnownFragmentNames", "Unknown fragment %q.", sel.Name.Name)
- return
- }
- fragTyp := c.schema.Types[frag.On.Name]
- if !compatible(t, fragTyp) {
- c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment %q cannot be spread here as objects of type %q can never be of type %q.", frag.Name.Name, t, fragTyp)
- }
-
- default:
- panic("unreachable")
- }
-}
-
-func compatible(a, b common.Type) bool {
- for _, pta := range possibleTypes(a) {
- for _, ptb := range possibleTypes(b) {
- if pta == ptb {
- return true
- }
- }
- }
- return false
-}
-
-func possibleTypes(t common.Type) []*schema.Object {
- switch t := t.(type) {
- case *schema.Object:
- return []*schema.Object{t}
- case *schema.Interface:
- return t.PossibleTypes
- case *schema.Union:
- return t.PossibleTypes
- default:
- return nil
- }
-}
-
-func markUsedFragments(c *context, sels []query.Selection, fragUsed map[*query.FragmentDecl]struct{}) {
- for _, sel := range sels {
- switch sel := sel.(type) {
- case *query.Field:
- if sel.Selections != nil {
- markUsedFragments(c, sel.Selections, fragUsed)
- }
-
- case *query.InlineFragment:
- markUsedFragments(c, sel.Selections, fragUsed)
-
- case *query.FragmentSpread:
- frag := c.doc.Fragments.Get(sel.Name.Name)
- if frag == nil {
- return
- }
-
- if _, ok := fragUsed[frag]; ok {
- return
- }
- fragUsed[frag] = struct{}{}
- markUsedFragments(c, frag.Selections, fragUsed)
-
- default:
- panic("unreachable")
- }
- }
-}
-
-func detectFragmentCycle(c *context, sels []query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) {
- for _, sel := range sels {
- detectFragmentCycleSel(c, sel, fragVisited, spreadPath, spreadPathIndex)
- }
-}
-
-func detectFragmentCycleSel(c *context, sel query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) {
- switch sel := sel.(type) {
- case *query.Field:
- if sel.Selections != nil {
- detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex)
- }
-
- case *query.InlineFragment:
- detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex)
-
- case *query.FragmentSpread:
- frag := c.doc.Fragments.Get(sel.Name.Name)
- if frag == nil {
- return
- }
-
- spreadPath = append(spreadPath, sel)
- if i, ok := spreadPathIndex[frag.Name.Name]; ok {
- cyclePath := spreadPath[i:]
- via := ""
- if len(cyclePath) > 1 {
- names := make([]string, len(cyclePath)-1)
- for i, frag := range cyclePath[:len(cyclePath)-1] {
- names[i] = frag.Name.Name
- }
- via = " via " + strings.Join(names, ", ")
- }
-
- locs := make([]errors.Location, len(cyclePath))
- for i, frag := range cyclePath {
- locs[i] = frag.Loc
- }
- c.addErrMultiLoc(locs, "NoFragmentCycles", "Cannot spread fragment %q within itself%s.", frag.Name.Name, via)
- return
- }
-
- if _, ok := fragVisited[frag]; ok {
- return
- }
- fragVisited[frag] = struct{}{}
-
- spreadPathIndex[frag.Name.Name] = len(spreadPath)
- detectFragmentCycle(c, frag.Selections, fragVisited, spreadPath, spreadPathIndex)
- delete(spreadPathIndex, frag.Name.Name)
-
- default:
- panic("unreachable")
- }
-}
-
-func (c *context) validateOverlap(a, b query.Selection, reasons *[]string, locs *[]errors.Location) {
- if a == b {
- return
- }
-
- if _, ok := c.overlapValidated[selectionPair{a, b}]; ok {
- return
- }
- c.overlapValidated[selectionPair{a, b}] = struct{}{}
- c.overlapValidated[selectionPair{b, a}] = struct{}{}
-
- switch a := a.(type) {
- case *query.Field:
- switch b := b.(type) {
- case *query.Field:
- if b.Alias.Loc.Before(a.Alias.Loc) {
- a, b = b, a
- }
- if reasons2, locs2 := c.validateFieldOverlap(a, b); len(reasons2) != 0 {
- locs2 = append(locs2, a.Alias.Loc, b.Alias.Loc)
- if reasons == nil {
- c.addErrMultiLoc(locs2, "OverlappingFieldsCanBeMerged", "Fields %q conflict because %s. Use different aliases on the fields to fetch both if this was intentional.", a.Alias.Name, strings.Join(reasons2, " and "))
- return
- }
- for _, r := range reasons2 {
- *reasons = append(*reasons, fmt.Sprintf("subfields %q conflict because %s", a.Alias.Name, r))
- }
- *locs = append(*locs, locs2...)
- }
-
- case *query.InlineFragment:
- for _, sel := range b.Selections {
- c.validateOverlap(a, sel, reasons, locs)
- }
-
- case *query.FragmentSpread:
- if frag := c.doc.Fragments.Get(b.Name.Name); frag != nil {
- for _, sel := range frag.Selections {
- c.validateOverlap(a, sel, reasons, locs)
- }
- }
-
- default:
- panic("unreachable")
- }
-
- case *query.InlineFragment:
- for _, sel := range a.Selections {
- c.validateOverlap(sel, b, reasons, locs)
- }
-
- case *query.FragmentSpread:
- if frag := c.doc.Fragments.Get(a.Name.Name); frag != nil {
- for _, sel := range frag.Selections {
- c.validateOverlap(sel, b, reasons, locs)
- }
- }
-
- default:
- panic("unreachable")
- }
-}
-
-func (c *context) validateFieldOverlap(a, b *query.Field) ([]string, []errors.Location) {
- if a.Alias.Name != b.Alias.Name {
- return nil, nil
- }
-
- if asf := c.fieldMap[a].sf; asf != nil {
- if bsf := c.fieldMap[b].sf; bsf != nil {
- if !typesCompatible(asf.Type, bsf.Type) {
- return []string{fmt.Sprintf("they return conflicting types %s and %s", asf.Type, bsf.Type)}, nil
- }
- }
- }
-
- at := c.fieldMap[a].parent
- bt := c.fieldMap[b].parent
- if at == nil || bt == nil || at == bt {
- if a.Name.Name != b.Name.Name {
- return []string{fmt.Sprintf("%s and %s are different fields", a.Name.Name, b.Name.Name)}, nil
- }
-
- if argumentsConflict(a.Arguments, b.Arguments) {
- return []string{"they have differing arguments"}, nil
- }
- }
-
- var reasons []string
- var locs []errors.Location
- for _, a2 := range a.Selections {
- for _, b2 := range b.Selections {
- c.validateOverlap(a2, b2, &reasons, &locs)
- }
- }
- return reasons, locs
-}
-
-func argumentsConflict(a, b common.ArgumentList) bool {
- if len(a) != len(b) {
- return true
- }
- for _, argA := range a {
- valB, ok := b.Get(argA.Name.Name)
- if !ok || !reflect.DeepEqual(argA.Value.Value(nil), valB.Value(nil)) {
- return true
- }
- }
- return false
-}
-
-func fields(t common.Type) schema.FieldList {
- switch t := t.(type) {
- case *schema.Object:
- return t.Fields
- case *schema.Interface:
- return t.Fields
- default:
- return nil
- }
-}
-
-func unwrapType(t common.Type) schema.NamedType {
- if t == nil {
- return nil
- }
- for {
- switch t2 := t.(type) {
- case schema.NamedType:
- return t2
- case *common.List:
- t = t2.OfType
- case *common.NonNull:
- t = t2.OfType
- default:
- panic("unreachable")
- }
- }
-}
-
-func resolveType(c *context, t common.Type) common.Type {
- t2, err := common.ResolveType(t, c.schema.Resolve)
- if err != nil {
- c.errs = append(c.errs, err)
- }
- return t2
-}
-
-func validateDirectives(c *opContext, loc string, directives common.DirectiveList) {
- directiveNames := make(nameSet)
- for _, d := range directives {
- dirName := d.Name.Name
- validateNameCustomMsg(c.context, directiveNames, d.Name, "UniqueDirectivesPerLocation", func() string {
- return fmt.Sprintf("The directive %q can only be used once at this location.", dirName)
- })
-
- validateArgumentLiterals(c, d.Args)
-
- dd, ok := c.schema.Directives[dirName]
- if !ok {
- c.addErr(d.Name.Loc, "KnownDirectives", "Unknown directive %q.", dirName)
- continue
- }
-
- locOK := false
- for _, allowedLoc := range dd.Locs {
- if loc == allowedLoc {
- locOK = true
- break
- }
- }
- if !locOK {
- c.addErr(d.Name.Loc, "KnownDirectives", "Directive %q may not be used on %s.", dirName, loc)
- }
-
- validateArgumentTypes(c, d.Args, dd.Args, d.Name.Loc,
- func() string { return fmt.Sprintf("directive %q", "@"+dirName) },
- func() string { return fmt.Sprintf("Directive %q", "@"+dirName) },
- )
- }
-}
-
-type nameSet map[string]errors.Location
-
-func validateName(c *context, set nameSet, name common.Ident, rule string, kind string) {
- validateNameCustomMsg(c, set, name, rule, func() string {
- return fmt.Sprintf("There can be only one %s named %q.", kind, name.Name)
- })
-}
-
-func validateNameCustomMsg(c *context, set nameSet, name common.Ident, rule string, msg func() string) {
- if loc, ok := set[name.Name]; ok {
- c.addErrMultiLoc([]errors.Location{loc, name.Loc}, rule, msg())
- return
- }
- set[name.Name] = name.Loc
-}
-
-func validateArgumentTypes(c *opContext, args common.ArgumentList, argDecls common.InputValueList, loc errors.Location, owner1, owner2 func() string) {
- for _, selArg := range args {
- arg := argDecls.Get(selArg.Name.Name)
- if arg == nil {
- c.addErr(selArg.Name.Loc, "KnownArgumentNames", "Unknown argument %q on %s.", selArg.Name.Name, owner1())
- continue
- }
- value := selArg.Value
- if ok, reason := validateValueType(c, value, arg.Type); !ok {
- c.addErr(value.Location(), "ArgumentsOfCorrectType", "Argument %q has invalid value %s.\n%s", arg.Name.Name, value, reason)
- }
- }
- for _, decl := range argDecls {
- if _, ok := decl.Type.(*common.NonNull); ok {
- if _, ok := args.Get(decl.Name.Name); !ok {
- c.addErr(loc, "ProvidedNonNullArguments", "%s argument %q of type %q is required but not provided.", owner2(), decl.Name.Name, decl.Type)
- }
- }
- }
-}
-
-func validateArgumentLiterals(c *opContext, args common.ArgumentList) {
- argNames := make(nameSet)
- for _, arg := range args {
- validateName(c.context, argNames, arg.Name, "UniqueArgumentNames", "argument")
- validateLiteral(c, arg.Value)
- }
-}
-
-func validateLiteral(c *opContext, l common.Literal) {
- switch l := l.(type) {
- case *common.ObjectLit:
- fieldNames := make(nameSet)
- for _, f := range l.Fields {
- validateName(c.context, fieldNames, f.Name, "UniqueInputFieldNames", "input field")
- validateLiteral(c, f.Value)
- }
- case *common.ListLit:
- for _, entry := range l.Entries {
- validateLiteral(c, entry)
- }
- case *common.Variable:
- for _, op := range c.ops {
- v := op.Vars.Get(l.Name)
- if v == nil {
- byOp := ""
- if op.Name.Name != "" {
- byOp = fmt.Sprintf(" by operation %q", op.Name.Name)
- }
- c.opErrs[op] = append(c.opErrs[op], &errors.QueryError{
- Message: fmt.Sprintf("Variable %q is not defined%s.", "$"+l.Name, byOp),
- Locations: []errors.Location{l.Loc, op.Loc},
- Rule: "NoUndefinedVariables",
- })
- continue
- }
- c.usedVars[op][v] = struct{}{}
- }
- }
-}
-
-func validateValueType(c *opContext, v common.Literal, t common.Type) (bool, string) {
- if v, ok := v.(*common.Variable); ok {
- for _, op := range c.ops {
- if v2 := op.Vars.Get(v.Name); v2 != nil {
- t2, err := common.ResolveType(v2.Type, c.schema.Resolve)
- if _, ok := t2.(*common.NonNull); !ok && v2.Default != nil {
- t2 = &common.NonNull{OfType: t2}
- }
- if err == nil && !typeCanBeUsedAs(t2, t) {
- c.addErrMultiLoc([]errors.Location{v2.Loc, v.Loc}, "VariablesInAllowedPosition", "Variable %q of type %q used in position expecting type %q.", "$"+v.Name, t2, t)
- }
- }
- }
- return true, ""
- }
-
- if nn, ok := t.(*common.NonNull); ok {
- if isNull(v) {
- return false, fmt.Sprintf("Expected %q, found null.", t)
- }
- t = nn.OfType
- }
- if isNull(v) {
- return true, ""
- }
-
- switch t := t.(type) {
- case *schema.Scalar, *schema.Enum:
- if lit, ok := v.(*common.BasicLit); ok {
- if validateBasicLit(lit, t) {
- return true, ""
- }
- } else {
- // custom complex scalars will be validated when unmarshaling
- return true, ""
- }
-
- case *common.List:
- list, ok := v.(*common.ListLit)
- if !ok {
- return validateValueType(c, v, t.OfType) // single value instead of list
- }
- for i, entry := range list.Entries {
- if ok, reason := validateValueType(c, entry, t.OfType); !ok {
- return false, fmt.Sprintf("In element #%d: %s", i, reason)
- }
- }
- return true, ""
-
- case *schema.InputObject:
- v, ok := v.(*common.ObjectLit)
- if !ok {
- return false, fmt.Sprintf("Expected %q, found not an object.", t)
- }
- for _, f := range v.Fields {
- name := f.Name.Name
- iv := t.Values.Get(name)
- if iv == nil {
- return false, fmt.Sprintf("In field %q: Unknown field.", name)
- }
- if ok, reason := validateValueType(c, f.Value, iv.Type); !ok {
- return false, fmt.Sprintf("In field %q: %s", name, reason)
- }
- }
- for _, iv := range t.Values {
- found := false
- for _, f := range v.Fields {
- if f.Name.Name == iv.Name.Name {
- found = true
- break
- }
- }
- if !found {
- if _, ok := iv.Type.(*common.NonNull); ok && iv.Default == nil {
- return false, fmt.Sprintf("In field %q: Expected %q, found null.", iv.Name.Name, iv.Type)
- }
- }
- }
- return true, ""
- }
-
- return false, fmt.Sprintf("Expected type %q, found %s.", t, v)
-}
-
-func validateBasicLit(v *common.BasicLit, t common.Type) bool {
- switch t := t.(type) {
- case *schema.Scalar:
- switch t.Name {
- case "Int":
- if v.Type != scanner.Int {
- return false
- }
- f, err := strconv.ParseFloat(v.Text, 64)
- if err != nil {
- panic(err)
- }
- return f >= math.MinInt32 && f <= math.MaxInt32
- case "Float":
- return v.Type == scanner.Int || v.Type == scanner.Float
- case "String":
- return v.Type == scanner.String
- case "Boolean":
- return v.Type == scanner.Ident && (v.Text == "true" || v.Text == "false")
- case "ID":
- return v.Type == scanner.Int || v.Type == scanner.String
- default:
- //TODO: Type-check against expected type by Unmarshaling
- return true
- }
-
- case *schema.Enum:
- if v.Type != scanner.Ident {
- return false
- }
- for _, option := range t.Values {
- if option.Name == v.Text {
- return true
- }
- }
- return false
- }
-
- return false
-}
-
-func canBeFragment(t common.Type) bool {
- switch t.(type) {
- case *schema.Object, *schema.Interface, *schema.Union:
- return true
- default:
- return false
- }
-}
-
-func canBeInput(t common.Type) bool {
- switch t := t.(type) {
- case *schema.InputObject, *schema.Scalar, *schema.Enum:
- return true
- case *common.List:
- return canBeInput(t.OfType)
- case *common.NonNull:
- return canBeInput(t.OfType)
- default:
- return false
- }
-}
-
-func hasSubfields(t common.Type) bool {
- switch t := t.(type) {
- case *schema.Object, *schema.Interface, *schema.Union:
- return true
- case *common.List:
- return hasSubfields(t.OfType)
- case *common.NonNull:
- return hasSubfields(t.OfType)
- default:
- return false
- }
-}
-
-func isLeaf(t common.Type) bool {
- switch t.(type) {
- case *schema.Scalar, *schema.Enum:
- return true
- default:
- return false
- }
-}
-
-func isNull(lit interface{}) bool {
- _, ok := lit.(*common.NullLit)
- return ok
-}
-
-func typesCompatible(a, b common.Type) bool {
- al, aIsList := a.(*common.List)
- bl, bIsList := b.(*common.List)
- if aIsList || bIsList {
- return aIsList && bIsList && typesCompatible(al.OfType, bl.OfType)
- }
-
- ann, aIsNN := a.(*common.NonNull)
- bnn, bIsNN := b.(*common.NonNull)
- if aIsNN || bIsNN {
- return aIsNN && bIsNN && typesCompatible(ann.OfType, bnn.OfType)
- }
-
- if isLeaf(a) || isLeaf(b) {
- return a == b
- }
-
- return true
-}
-
-func typeCanBeUsedAs(t, as common.Type) bool {
- nnT, okT := t.(*common.NonNull)
- if okT {
- t = nnT.OfType
- }
-
- nnAs, okAs := as.(*common.NonNull)
- if okAs {
- as = nnAs.OfType
- if !okT {
- return false // nullable can not be used as non-null
- }
- }
-
- if t == as {
- return true
- }
-
- if lT, ok := t.(*common.List); ok {
- if lAs, ok := as.(*common.List); ok {
- return typeCanBeUsedAs(lT.OfType, lAs.OfType)
- }
- }
- return false
-}
diff --git a/vendor/github.com/vektah/gqlparser/.gitignore b/vendor/github.com/vektah/gqlparser/.gitignore
new file mode 100644
index 00000000..877392a7
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/.gitignore
@@ -0,0 +1,5 @@
+/vendor
+/validator/imported/node_modules
+/validator/imported/graphql-js
+
+.idea/
diff --git a/vendor/github.com/vektah/gqlparser/.gometalinter.json b/vendor/github.com/vektah/gqlparser/.gometalinter.json
new file mode 100644
index 00000000..e4e00223
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/.gometalinter.json
@@ -0,0 +1,13 @@
+{
+ "sort": ["path"],
+ "Deadline": "5m",
+ "Linters": {
+ "errcheck": {
+ "Command": "errcheck -abspath -ignore '[rR]ead|[wW]rite|Close'",
+ "Pattern": "PATH:LINE:COL:MESSAGE",
+ "InstallFrom": "github.com/kisielk/errcheck",
+ "PartitionStrategy": "packages"
+ }
+ },
+ "Disable": ["golint","gocyclo", "goconst", "gas", "interfacer", "vet","gosec"]
+}
diff --git a/vendor/github.com/vektah/gqlgen/LICENSE b/vendor/github.com/vektah/gqlparser/LICENSE
index 18e1b249..1221b9d3 100644
--- a/vendor/github.com/vektah/gqlgen/LICENSE
+++ b/vendor/github.com/vektah/gqlparser/LICENSE
@@ -16,4 +16,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+SOFTWARE. \ No newline at end of file
diff --git a/vendor/github.com/vektah/gqlparser/ast/argmap.go b/vendor/github.com/vektah/gqlparser/ast/argmap.go
new file mode 100644
index 00000000..43f6a3d6
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/argmap.go
@@ -0,0 +1,37 @@
+package ast
+
+func arg2map(defs ArgumentDefinitionList, args ArgumentList, vars map[string]interface{}) map[string]interface{} {
+ result := map[string]interface{}{}
+ var err error
+
+ for _, argDef := range defs {
+ var val interface{}
+ var hasValue bool
+
+ if argValue := args.ForName(argDef.Name); argValue != nil {
+ if argValue.Value.Kind == Variable {
+ val, hasValue = vars[argValue.Value.Raw]
+ } else {
+ val, err = argValue.Value.Value(vars)
+ if err != nil {
+ panic(err)
+ }
+ hasValue = true
+ }
+ }
+
+ if !hasValue && argDef.DefaultValue != nil {
+ val, err = argDef.DefaultValue.Value(vars)
+ if err != nil {
+ panic(err)
+ }
+ hasValue = true
+ }
+
+ if hasValue {
+ result[argDef.Name] = val
+ }
+ }
+
+ return result
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/collections.go b/vendor/github.com/vektah/gqlparser/ast/collections.go
new file mode 100644
index 00000000..6bf67297
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/collections.go
@@ -0,0 +1,138 @@
+package ast
+
+type FieldList []*FieldDefinition
+
+func (l FieldList) ForName(name string) *FieldDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type EnumValueList []*EnumValueDefinition
+
+func (l EnumValueList) ForName(name string) *EnumValueDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type DirectiveList []*Directive
+
+func (l DirectiveList) ForName(name string) *Directive {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type OperationList []*OperationDefinition
+
+func (l OperationList) ForName(name string) *OperationDefinition {
+ if name == "" && len(l) == 1 {
+ return l[0]
+ }
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type FragmentDefinitionList []*FragmentDefinition
+
+func (l FragmentDefinitionList) ForName(name string) *FragmentDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type VariableDefinitionList []*VariableDefinition
+
+func (l VariableDefinitionList) ForName(name string) *VariableDefinition {
+ for _, it := range l {
+ if it.Variable == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type ArgumentList []*Argument
+
+func (l ArgumentList) ForName(name string) *Argument {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type ArgumentDefinitionList []*ArgumentDefinition
+
+func (l ArgumentDefinitionList) ForName(name string) *ArgumentDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type SchemaDefinitionList []*SchemaDefinition
+
+type DirectiveDefinitionList []*DirectiveDefinition
+
+func (l DirectiveDefinitionList) ForName(name string) *DirectiveDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type DefinitionList []*Definition
+
+func (l DefinitionList) ForName(name string) *Definition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type OperationTypeDefinitionList []*OperationTypeDefinition
+
+func (l OperationTypeDefinitionList) ForType(name string) *OperationTypeDefinition {
+ for _, it := range l {
+ if it.Type == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type ChildValueList []*ChildValue
+
+func (v ChildValueList) ForName(name string) *Value {
+ for _, f := range v {
+ if f.Name == name {
+ return f.Value
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/definition.go b/vendor/github.com/vektah/gqlparser/ast/definition.go
new file mode 100644
index 00000000..74f4ece5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/definition.go
@@ -0,0 +1,92 @@
+package ast
+
+type DefinitionKind string
+
+const (
+ Scalar DefinitionKind = "SCALAR"
+ Object DefinitionKind = "OBJECT"
+ Interface DefinitionKind = "INTERFACE"
+ Union DefinitionKind = "UNION"
+ Enum DefinitionKind = "ENUM"
+ InputObject DefinitionKind = "INPUT_OBJECT"
+)
+
+// ObjectDefinition is the core type definition object, it includes all of the definable types
+// but does *not* cover schema or directives.
+//
+// @vektah: Javascript implementation has different types for all of these, but they are
+// more similar than different and don't define any behaviour. I think this style of
+// "some hot" struct works better, at least for go.
+//
+// Type extensions are also represented by this same struct.
+type Definition struct {
+ Kind DefinitionKind
+ Description string
+ Name string
+ Directives DirectiveList
+ Interfaces []string // object and input object
+ Fields FieldList // object and input object
+ Types []string // union
+ EnumValues EnumValueList // enum
+
+ Position *Position `dump:"-"`
+}
+
+func (d *Definition) IsLeafType() bool {
+ return d.Kind == Enum || d.Kind == Scalar
+}
+
+func (d *Definition) IsAbstractType() bool {
+ return d.Kind == Interface || d.Kind == Union
+}
+
+func (d *Definition) IsCompositeType() bool {
+ return d.Kind == Object || d.Kind == Interface || d.Kind == Union
+}
+
+func (d *Definition) IsInputType() bool {
+ return d.Kind == Scalar || d.Kind == Enum || d.Kind == InputObject
+}
+
+func (d *Definition) OneOf(types ...string) bool {
+ for _, t := range types {
+ if d.Name == t {
+ return true
+ }
+ }
+ return false
+}
+
+type FieldDefinition struct {
+ Description string
+ Name string
+ Arguments ArgumentDefinitionList // only for objects
+ DefaultValue *Value // only for input objects
+ Type *Type
+ Directives DirectiveList
+ Position *Position `dump:"-"`
+}
+
+type ArgumentDefinition struct {
+ Description string
+ Name string
+ DefaultValue *Value
+ Type *Type
+ Directives DirectiveList
+ Position *Position `dump:"-"`
+}
+
+type EnumValueDefinition struct {
+ Description string
+ Name string
+ Directives DirectiveList
+ Position *Position `dump:"-"`
+}
+
+type DirectiveDefinition struct {
+ Description string
+ Name string
+ Arguments ArgumentDefinitionList
+ Locations []DirectiveLocation
+ Position *Position `dump:"-"`
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/directive.go b/vendor/github.com/vektah/gqlparser/ast/directive.go
new file mode 100644
index 00000000..9b07c92a
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/directive.go
@@ -0,0 +1,42 @@
+package ast
+
+type DirectiveLocation string
+
+const (
+ // Executable
+ LocationQuery DirectiveLocation = `QUERY`
+ LocationMutation DirectiveLocation = `MUTATION`
+ LocationSubscription DirectiveLocation = `SUBSCRIPTION`
+ LocationField DirectiveLocation = `FIELD`
+ LocationFragmentDefinition DirectiveLocation = `FRAGMENT_DEFINITION`
+ LocationFragmentSpread DirectiveLocation = `FRAGMENT_SPREAD`
+ LocationInlineFragment DirectiveLocation = `INLINE_FRAGMENT`
+
+ // Type System
+ LocationSchema DirectiveLocation = `SCHEMA`
+ LocationScalar DirectiveLocation = `SCALAR`
+ LocationObject DirectiveLocation = `OBJECT`
+ LocationFieldDefinition DirectiveLocation = `FIELD_DEFINITION`
+ LocationArgumentDefinition DirectiveLocation = `ARGUMENT_DEFINITION`
+ LocationInterface DirectiveLocation = `INTERFACE`
+ LocationUnion DirectiveLocation = `UNION`
+ LocationEnum DirectiveLocation = `ENUM`
+ LocationEnumValue DirectiveLocation = `ENUM_VALUE`
+ LocationInputObject DirectiveLocation = `INPUT_OBJECT`
+ LocationInputFieldDefinition DirectiveLocation = `INPUT_FIELD_DEFINITION`
+)
+
+type Directive struct {
+ Name string
+ Arguments ArgumentList
+ Position *Position `dump:"-"`
+
+ // Requires validation
+ ParentDefinition *Definition
+ Definition *DirectiveDefinition
+ Location DirectiveLocation
+}
+
+func (d *Directive) ArgumentMap(vars map[string]interface{}) map[string]interface{} {
+ return arg2map(d.Definition.Arguments, d.Arguments, vars)
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/document.go b/vendor/github.com/vektah/gqlparser/ast/document.go
new file mode 100644
index 00000000..b7657d62
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/document.go
@@ -0,0 +1,65 @@
+package ast
+
+type QueryDocument struct {
+ Operations OperationList
+ Fragments FragmentDefinitionList
+ Position *Position `dump:"-"`
+}
+
+type SchemaDocument struct {
+ Schema SchemaDefinitionList
+ SchemaExtension SchemaDefinitionList
+ Directives DirectiveDefinitionList
+ Definitions DefinitionList
+ Extensions DefinitionList
+ Position *Position `dump:"-"`
+}
+
+func (d *SchemaDocument) Merge(other *SchemaDocument) {
+ d.Schema = append(d.Schema, other.Schema...)
+ d.SchemaExtension = append(d.SchemaExtension, other.SchemaExtension...)
+ d.Directives = append(d.Directives, other.Directives...)
+ d.Definitions = append(d.Definitions, other.Definitions...)
+ d.Extensions = append(d.Extensions, other.Extensions...)
+}
+
+type Schema struct {
+ Query *Definition
+ Mutation *Definition
+ Subscription *Definition
+
+ Types map[string]*Definition
+ Directives map[string]*DirectiveDefinition
+
+ PossibleTypes map[string][]*Definition
+}
+
+func (s *Schema) AddPossibleType(name string, def *Definition) {
+ s.PossibleTypes[name] = append(s.PossibleTypes[name], def)
+}
+
+// GetPossibleTypes will enumerate all the definitions for a given interface or union
+func (s *Schema) GetPossibleTypes(def *Definition) []*Definition {
+ if def.Kind == Union {
+ var defs []*Definition
+ for _, t := range def.Types {
+ defs = append(defs, s.Types[t])
+ }
+ return defs
+ }
+
+ return s.PossibleTypes[def.Name]
+}
+
+type SchemaDefinition struct {
+ Description string
+ Directives DirectiveList
+ OperationTypes OperationTypeDefinitionList
+ Position *Position `dump:"-"`
+}
+
+type OperationTypeDefinition struct {
+ Operation Operation
+ Type string
+ Position *Position `dump:"-"`
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/dumper.go b/vendor/github.com/vektah/gqlparser/ast/dumper.go
new file mode 100644
index 00000000..dbb7a7ef
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/dumper.go
@@ -0,0 +1,159 @@
+package ast
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+)
+
+// Dump turns ast into a stable string format for assertions in tests
+func Dump(i interface{}) string {
+ v := reflect.ValueOf(i)
+
+ d := dumper{Buffer: &bytes.Buffer{}}
+ d.dump(v)
+
+ return d.String()
+}
+
+type dumper struct {
+ *bytes.Buffer
+ indent int
+}
+
+type Dumpable interface {
+ Dump() string
+}
+
+func (d *dumper) dump(v reflect.Value) {
+ if dumpable, isDumpable := v.Interface().(Dumpable); isDumpable {
+ d.WriteString(dumpable.Dump())
+ return
+ }
+ switch v.Kind() {
+ case reflect.Bool:
+ if v.Bool() {
+ d.WriteString("true")
+ } else {
+ d.WriteString("false")
+ }
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ d.WriteString(fmt.Sprintf("%d", v.Int()))
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ d.WriteString(fmt.Sprintf("%d", v.Uint()))
+
+ case reflect.Float32, reflect.Float64:
+ d.WriteString(fmt.Sprintf("%.2f", v.Float()))
+
+ case reflect.String:
+ if v.Type().Name() != "string" {
+ d.WriteString(v.Type().Name() + "(" + strconv.Quote(v.String()) + ")")
+ } else {
+ d.WriteString(strconv.Quote(v.String()))
+ }
+
+ case reflect.Array, reflect.Slice:
+ d.dumpArray(v)
+
+ case reflect.Interface, reflect.Ptr:
+ d.dumpPtr(v)
+
+ case reflect.Struct:
+ d.dumpStruct(v)
+
+ default:
+ panic(fmt.Errorf("unsupported kind: %s\n buf: %s", v.Kind().String(), d.String()))
+ }
+}
+
+func (d *dumper) writeIndent() {
+ d.Buffer.WriteString(strings.Repeat(" ", d.indent))
+}
+
+func (d *dumper) nl() {
+ d.Buffer.WriteByte('\n')
+ d.writeIndent()
+}
+
+func typeName(t reflect.Type) string {
+ if t.Kind() == reflect.Ptr {
+ return typeName(t.Elem())
+ }
+ return t.Name()
+}
+
+func (d *dumper) dumpArray(v reflect.Value) {
+ d.WriteString("[" + typeName(v.Type().Elem()) + "]")
+
+ for i := 0; i < v.Len(); i++ {
+ d.nl()
+ d.WriteString("- ")
+ d.indent++
+ d.dump(v.Index(i))
+ d.indent--
+ }
+}
+
+func (d *dumper) dumpStruct(v reflect.Value) {
+ d.WriteString("<" + v.Type().Name() + ">")
+ d.indent++
+
+ typ := v.Type()
+ for i := 0; i < v.NumField(); i++ {
+ f := v.Field(i)
+ if typ.Field(i).Tag.Get("dump") == "-" {
+ continue
+ }
+
+ if isZero(f) {
+ continue
+ }
+ d.nl()
+ d.WriteString(typ.Field(i).Name)
+ d.WriteString(": ")
+ d.dump(v.Field(i))
+ }
+
+ d.indent--
+}
+
+func isZero(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.Ptr, reflect.Interface:
+ return v.IsNil()
+ case reflect.Func, reflect.Map:
+ return v.IsNil()
+
+ case reflect.Array, reflect.Slice:
+ if v.IsNil() {
+ return true
+ }
+ z := true
+ for i := 0; i < v.Len(); i++ {
+ z = z && isZero(v.Index(i))
+ }
+ return z
+ case reflect.Struct:
+ z := true
+ for i := 0; i < v.NumField(); i++ {
+ z = z && isZero(v.Field(i))
+ }
+ return z
+ case reflect.String:
+ return v.String() == ""
+ }
+
+ // Compare other types directly:
+ return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()))
+}
+
+func (d *dumper) dumpPtr(v reflect.Value) {
+ if v.IsNil() {
+ d.WriteString("nil")
+ return
+ }
+ d.dump(v.Elem())
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/fragment.go b/vendor/github.com/vektah/gqlparser/ast/fragment.go
new file mode 100644
index 00000000..57ab56c7
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/fragment.go
@@ -0,0 +1,38 @@
+package ast
+
+type FragmentSpread struct {
+ Name string
+ Directives DirectiveList
+
+ // Require validation
+ ObjectDefinition *Definition
+ Definition *FragmentDefinition
+
+ Position *Position `dump:"-"`
+}
+
+type InlineFragment struct {
+ TypeCondition string
+ Directives DirectiveList
+ SelectionSet SelectionSet
+
+ // Require validation
+ ObjectDefinition *Definition
+
+ Position *Position `dump:"-"`
+}
+
+type FragmentDefinition struct {
+ Name string
+ // Note: fragment variable definitions are experimental and may be changed
+ // or removed in the future.
+ VariableDefinition VariableDefinitionList
+ TypeCondition string
+ Directives DirectiveList
+ SelectionSet SelectionSet
+
+ // Require validation
+ Definition *Definition
+
+ Position *Position `dump:"-"`
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/operation.go b/vendor/github.com/vektah/gqlparser/ast/operation.go
new file mode 100644
index 00000000..03e916a0
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/operation.go
@@ -0,0 +1,29 @@
+package ast
+
+type Operation string
+
+const (
+ Query Operation = "query"
+ Mutation Operation = "mutation"
+ Subscription Operation = "subscription"
+)
+
+type OperationDefinition struct {
+ Operation Operation
+ Name string
+ VariableDefinitions VariableDefinitionList
+ Directives DirectiveList
+ SelectionSet SelectionSet
+ Position *Position `dump:"-"`
+}
+
+type VariableDefinition struct {
+ Variable string
+ Type *Type
+ DefaultValue *Value
+ Position *Position `dump:"-"`
+
+ // Requires validation
+ Definition *Definition
+ Used bool `dump:"-"`
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/selection.go b/vendor/github.com/vektah/gqlparser/ast/selection.go
new file mode 100644
index 00000000..159db844
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/selection.go
@@ -0,0 +1,39 @@
+package ast
+
+type SelectionSet []Selection
+
+type Selection interface {
+ isSelection()
+ GetPosition() *Position
+}
+
+func (*Field) isSelection() {}
+func (*FragmentSpread) isSelection() {}
+func (*InlineFragment) isSelection() {}
+
+func (s *Field) GetPosition() *Position { return s.Position }
+func (s *FragmentSpread) GetPosition() *Position { return s.Position }
+func (s *InlineFragment) GetPosition() *Position { return s.Position }
+
+type Field struct {
+ Alias string
+ Name string
+ Arguments ArgumentList
+ Directives DirectiveList
+ SelectionSet SelectionSet
+ Position *Position `dump:"-"`
+
+ // Require validation
+ Definition *FieldDefinition
+ ObjectDefinition *Definition
+}
+
+type Argument struct {
+ Name string
+ Value *Value
+ Position *Position `dump:"-"`
+}
+
+func (f *Field) ArgumentMap(vars map[string]interface{}) map[string]interface{} {
+ return arg2map(f.Definition.Arguments, f.Arguments, vars)
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/source.go b/vendor/github.com/vektah/gqlparser/ast/source.go
new file mode 100644
index 00000000..9d44dd9c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/source.go
@@ -0,0 +1,14 @@
+package ast
+
+type Source struct {
+ Name string
+ Input string
+}
+
+type Position struct {
+ Start int // The starting position, in runes, of this token in the input.
+ End int // The end position, in runes, of this token in the input.
+ Line int // The line number at the start of this item.
+ Column int // The line number at the start of this item.
+ Src *Source // The source document this token belongs to
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/type.go b/vendor/github.com/vektah/gqlparser/ast/type.go
new file mode 100644
index 00000000..9577fdb4
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/type.go
@@ -0,0 +1,68 @@
+package ast
+
+func NonNullNamedType(named string, pos *Position) *Type {
+ return &Type{NamedType: named, NonNull: true, Position: pos}
+}
+
+func NamedType(named string, pos *Position) *Type {
+ return &Type{NamedType: named, NonNull: false, Position: pos}
+}
+
+func NonNullListType(elem *Type, pos *Position) *Type {
+ return &Type{Elem: elem, NonNull: true, Position: pos}
+}
+
+func ListType(elem *Type, pos *Position) *Type {
+ return &Type{Elem: elem, NonNull: false, Position: pos}
+}
+
+type Type struct {
+ NamedType string
+ Elem *Type
+ NonNull bool
+ Position *Position `dump:"-"`
+}
+
+func (t *Type) Name() string {
+ if t.NamedType != "" {
+ return t.NamedType
+ }
+
+ return t.Elem.Name()
+}
+
+func (t *Type) String() string {
+ nn := ""
+ if t.NonNull {
+ nn = "!"
+ }
+ if t.NamedType != "" {
+ return t.NamedType + nn
+ }
+
+ return "[" + t.Elem.String() + "]" + nn
+}
+
+func (t *Type) IsCompatible(other *Type) bool {
+ if t.NamedType != other.NamedType {
+ return false
+ }
+
+ if t.Elem != nil && other.Elem == nil {
+ return false
+ }
+
+ if t.Elem != nil && !t.Elem.IsCompatible(other.Elem) {
+ return false
+ }
+
+ if other.NonNull {
+ return t.NonNull
+ }
+
+ return true
+}
+
+func (v *Type) Dump() string {
+ return v.String()
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/value.go b/vendor/github.com/vektah/gqlparser/ast/value.go
new file mode 100644
index 00000000..3168b266
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/value.go
@@ -0,0 +1,120 @@
+package ast
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+type ValueKind int
+
+const (
+ Variable ValueKind = iota
+ IntValue
+ FloatValue
+ StringValue
+ BlockValue
+ BooleanValue
+ NullValue
+ EnumValue
+ ListValue
+ ObjectValue
+)
+
+type Value struct {
+ Raw string
+ Children ChildValueList
+ Kind ValueKind
+ Position *Position `dump:"-"`
+
+ // Require validation
+ Definition *Definition
+ VariableDefinition *VariableDefinition
+ ExpectedType *Type
+}
+
+type ChildValue struct {
+ Name string
+ Value *Value
+ Position *Position `dump:"-"`
+}
+
+func (v *Value) Value(vars map[string]interface{}) (interface{}, error) {
+ if v == nil {
+ return nil, nil
+ }
+ switch v.Kind {
+ case Variable:
+ if value, ok := vars[v.Raw]; ok {
+ return value, nil
+ }
+ if v.VariableDefinition != nil && v.VariableDefinition.DefaultValue != nil {
+ return v.VariableDefinition.DefaultValue.Value(vars)
+ }
+ return nil, nil
+ case IntValue:
+ return strconv.ParseInt(v.Raw, 10, 64)
+ case FloatValue:
+ return strconv.ParseFloat(v.Raw, 64)
+ case StringValue, BlockValue, EnumValue:
+ return v.Raw, nil
+ case BooleanValue:
+ return strconv.ParseBool(v.Raw)
+ case NullValue:
+ return nil, nil
+ case ListValue:
+ var val []interface{}
+ for _, elem := range v.Children {
+ elemVal, err := elem.Value.Value(vars)
+ if err != nil {
+ return val, err
+ }
+ val = append(val, elemVal)
+ }
+ return val, nil
+ case ObjectValue:
+ val := map[string]interface{}{}
+ for _, elem := range v.Children {
+ elemVal, err := elem.Value.Value(vars)
+ if err != nil {
+ return val, err
+ }
+ val[elem.Name] = elemVal
+ }
+ return val, nil
+ default:
+ panic(fmt.Errorf("unknown value kind %d", v.Kind))
+ }
+}
+
+func (v *Value) String() string {
+ if v == nil {
+ return "<nil>"
+ }
+ switch v.Kind {
+ case Variable:
+ return "$" + v.Raw
+ case IntValue, FloatValue, EnumValue, BooleanValue, NullValue:
+ return v.Raw
+ case StringValue, BlockValue:
+ return strconv.Quote(v.Raw)
+ case ListValue:
+ var val []string
+ for _, elem := range v.Children {
+ val = append(val, elem.Value.String())
+ }
+ return "[" + strings.Join(val, ",") + "]"
+ case ObjectValue:
+ var val []string
+ for _, elem := range v.Children {
+ val = append(val, strconv.Quote(elem.Name)+":"+elem.Value.String())
+ }
+ return "{" + strings.Join(val, ",") + "}"
+ default:
+ panic(fmt.Errorf("unknown value kind %d", v.Kind))
+ }
+}
+
+func (v *Value) Dump() string {
+ return v.String()
+}
diff --git a/vendor/github.com/vektah/gqlparser/gqlerror/error.go b/vendor/github.com/vektah/gqlparser/gqlerror/error.go
new file mode 100644
index 00000000..c4c0847a
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/gqlerror/error.go
@@ -0,0 +1,133 @@
+package gqlerror
+
+import (
+ "bytes"
+ "fmt"
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors
+type Error struct {
+ Message string `json:"message"`
+ Path []interface{} `json:"path,omitempty"`
+ Locations []Location `json:"locations,omitempty"`
+ Extensions map[string]interface{} `json:"extensions,omitempty"`
+ Rule string `json:"-"`
+}
+
+func (err *Error) SetFile(file string) {
+ if file == "" {
+ return
+ }
+ if err.Extensions == nil {
+ err.Extensions = map[string]interface{}{}
+ }
+
+ err.Extensions["file"] = file
+}
+
+type Location struct {
+ Line int `json:"line,omitempty"`
+ Column int `json:"column,omitempty"`
+}
+
+type List []*Error
+
+func (err *Error) Error() string {
+ var res bytes.Buffer
+ if err == nil {
+ return ""
+ }
+ filename, _ := err.Extensions["file"].(string)
+ if filename == "" {
+ filename = "input"
+ }
+ res.WriteString(filename)
+
+ if len(err.Locations) > 0 {
+ res.WriteByte(':')
+ res.WriteString(strconv.Itoa(err.Locations[0].Line))
+ }
+
+ res.WriteString(": ")
+ if ps := err.pathString(); ps != "" {
+ res.WriteString(ps)
+ res.WriteByte(' ')
+ }
+
+ res.WriteString(err.Message)
+
+ return res.String()
+}
+
+func (err Error) pathString() string {
+ var str bytes.Buffer
+ for i, v := range err.Path {
+
+ switch v := v.(type) {
+ case int, int64:
+ str.WriteString(fmt.Sprintf("[%d]", v))
+ default:
+ if i != 0 {
+ str.WriteByte('.')
+ }
+ str.WriteString(fmt.Sprint(v))
+ }
+ }
+ return str.String()
+}
+
+func (errs List) Error() string {
+ var buf bytes.Buffer
+ for _, err := range errs {
+ buf.WriteString(err.Error())
+ buf.WriteByte('\n')
+ }
+ return buf.String()
+}
+
+func WrapPath(path []interface{}, err error) *Error {
+ return &Error{
+ Message: err.Error(),
+ Path: path,
+ }
+}
+
+func Errorf(message string, args ...interface{}) *Error {
+ return &Error{
+ Message: fmt.Sprintf(message, args...),
+ }
+}
+
+func ErrorPathf(path []interface{}, message string, args ...interface{}) *Error {
+ return &Error{
+ Message: fmt.Sprintf(message, args...),
+ Path: path,
+ }
+}
+
+func ErrorPosf(pos *ast.Position, message string, args ...interface{}) *Error {
+ return ErrorLocf(
+ pos.Src.Name,
+ pos.Line,
+ pos.Column,
+ message,
+ args...,
+ )
+}
+
+func ErrorLocf(file string, line int, col int, message string, args ...interface{}) *Error {
+ var extensions map[string]interface{}
+ if file != "" {
+ extensions = map[string]interface{}{"file": file}
+ }
+ return &Error{
+ Message: fmt.Sprintf(message, args...),
+ Extensions: extensions,
+ Locations: []Location{
+ {Line: line, Column: col},
+ },
+ }
+}
diff --git a/vendor/github.com/vektah/gqlparser/gqlparser.go b/vendor/github.com/vektah/gqlparser/gqlparser.go
new file mode 100644
index 00000000..71e46407
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/gqlparser.go
@@ -0,0 +1,42 @@
+package gqlparser
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/parser"
+ "github.com/vektah/gqlparser/validator"
+ _ "github.com/vektah/gqlparser/validator/rules"
+)
+
+func LoadSchema(str ...*ast.Source) (*ast.Schema, *gqlerror.Error) {
+ return validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...)
+}
+
+func MustLoadSchema(str ...*ast.Source) *ast.Schema {
+ s, err := validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...)
+ if err != nil {
+ panic(err)
+ }
+ return s
+}
+
+func LoadQuery(schema *ast.Schema, str string) (*ast.QueryDocument, gqlerror.List) {
+ query, err := parser.ParseQuery(&ast.Source{Input: str})
+ if err != nil {
+ return nil, gqlerror.List{err}
+ }
+ errs := validator.Validate(schema, query)
+ if errs != nil {
+ return nil, errs
+ }
+
+ return query, nil
+}
+
+func MustLoadQuery(schema *ast.Schema, str string) *ast.QueryDocument {
+ q, err := LoadQuery(schema, str)
+ if err != nil {
+ panic(err)
+ }
+ return q
+}
diff --git a/vendor/github.com/vektah/gqlparser/lexer/blockstring.go b/vendor/github.com/vektah/gqlparser/lexer/blockstring.go
new file mode 100644
index 00000000..4065a610
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/lexer/blockstring.go
@@ -0,0 +1,58 @@
+package lexer
+
+import (
+ "math"
+ "strings"
+)
+
+// blockStringValue produces the value of a block string from its parsed raw value, similar to
+// Coffeescript's block string, Python's docstring trim or Ruby's strip_heredoc.
+//
+// This implements the GraphQL spec's BlockStringValue() static algorithm.
+func blockStringValue(raw string) string {
+ lines := strings.Split(raw, "\n")
+
+ commonIndent := math.MaxInt32
+ for _, line := range lines {
+ indent := leadingWhitespace(line)
+ if indent < len(line) && indent < commonIndent {
+ commonIndent = indent
+ if commonIndent == 0 {
+ break
+ }
+ }
+ }
+
+ if commonIndent != math.MaxInt32 && len(lines) > 0 {
+ for i := 1; i < len(lines); i++ {
+ if len(lines[i]) < commonIndent {
+ lines[i] = ""
+ } else {
+ lines[i] = lines[i][commonIndent:]
+ }
+ }
+ }
+
+ start := 0
+ end := len(lines)
+
+ for start < end && leadingWhitespace(lines[start]) == math.MaxInt32 {
+ start++
+ }
+
+ for start < end && leadingWhitespace(lines[end-1]) == math.MaxInt32 {
+ end--
+ }
+
+ return strings.Join(lines[start:end], "\n")
+}
+
+func leadingWhitespace(str string) int {
+ for i, r := range str {
+ if r != ' ' && r != '\t' {
+ return i
+ }
+ }
+ // this line is made up entirely of whitespace, its leading whitespace doesnt count.
+ return math.MaxInt32
+}
diff --git a/vendor/github.com/vektah/gqlparser/lexer/lexer.go b/vendor/github.com/vektah/gqlparser/lexer/lexer.go
new file mode 100644
index 00000000..3aaa7102
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/lexer/lexer.go
@@ -0,0 +1,510 @@
+package lexer
+
+import (
+ "bytes"
+ "unicode/utf8"
+
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+// Lexer turns graphql request and schema strings into tokens
+type Lexer struct {
+ *ast.Source
+ // An offset into the string in bytes
+ start int
+ // An offset into the string in runes
+ startRunes int
+ // An offset into the string in bytes
+ end int
+ // An offset into the string in runes
+ endRunes int
+ // the current line number
+ line int
+ // An offset into the string in rune
+ lineStartRunes int
+}
+
+func New(src *ast.Source) Lexer {
+ return Lexer{
+ Source: src,
+ line: 1,
+ }
+}
+
+// take one rune from input and advance end
+func (s *Lexer) peek() (rune, int) {
+ return utf8.DecodeRuneInString(s.Input[s.end:])
+}
+
+func (s *Lexer) makeToken(kind Type) (Token, *gqlerror.Error) {
+ return s.makeValueToken(kind, s.Input[s.start:s.end])
+}
+
+func (s *Lexer) makeValueToken(kind Type, value string) (Token, *gqlerror.Error) {
+ return Token{
+ Kind: kind,
+ Value: value,
+ Pos: ast.Position{
+ Start: s.startRunes,
+ End: s.endRunes,
+ Line: s.line,
+ Column: s.startRunes - s.lineStartRunes + 1,
+ Src: s.Source,
+ },
+ }, nil
+}
+
+func (s *Lexer) makeError(format string, args ...interface{}) (Token, *gqlerror.Error) {
+ column := s.endRunes - s.lineStartRunes + 1
+ return Token{
+ Kind: Invalid,
+ Pos: ast.Position{
+ Start: s.startRunes,
+ End: s.endRunes,
+ Line: s.line,
+ Column: column,
+ Src: s.Source,
+ },
+ }, gqlerror.ErrorLocf(s.Source.Name, s.line, column, format, args...)
+}
+
+// ReadToken gets the next token from the source starting at the given position.
+//
+// This skips over whitespace and comments until it finds the next lexable
+// token, then lexes punctuators immediately or calls the appropriate helper
+// function for more complicated tokens.
+func (s *Lexer) ReadToken() (token Token, err *gqlerror.Error) {
+
+ s.ws()
+ s.start = s.end
+ s.startRunes = s.endRunes
+
+ if s.end >= len(s.Input) {
+ return s.makeToken(EOF)
+ }
+ r := s.Input[s.start]
+ s.end++
+ s.endRunes++
+ switch r {
+ case '!':
+ return s.makeValueToken(Bang, "")
+
+ case '$':
+ return s.makeValueToken(Dollar, "")
+ case '&':
+ return s.makeValueToken(Amp, "")
+ case '(':
+ return s.makeValueToken(ParenL, "")
+ case ')':
+ return s.makeValueToken(ParenR, "")
+ case '.':
+ if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == "..." {
+ s.end += 2
+ s.endRunes += 2
+ return s.makeValueToken(Spread, "")
+ }
+ case ':':
+ return s.makeValueToken(Colon, "")
+ case '=':
+ return s.makeValueToken(Equals, "")
+ case '@':
+ return s.makeValueToken(At, "")
+ case '[':
+ return s.makeValueToken(BracketL, "")
+ case ']':
+ return s.makeValueToken(BracketR, "")
+ case '{':
+ return s.makeValueToken(BraceL, "")
+ case '}':
+ return s.makeValueToken(BraceR, "")
+ case '|':
+ return s.makeValueToken(Pipe, "")
+ case '#':
+ s.readComment()
+ return s.ReadToken()
+
+ case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z':
+ return s.readName()
+
+ case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ return s.readNumber()
+
+ case '"':
+ if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == `"""` {
+ return s.readBlockString()
+ }
+
+ return s.readString()
+ }
+
+ s.end--
+ s.endRunes--
+
+ if r < 0x0020 && r != 0x0009 && r != 0x000a && r != 0x000d {
+ return s.makeError(`Cannot contain the invalid character "\u%04d"`, r)
+ }
+
+ if r == '\'' {
+ return s.makeError(`Unexpected single quote character ('), did you mean to use a double quote (")?`)
+ }
+
+ return s.makeError(`Cannot parse the unexpected character "%s".`, string(r))
+}
+
+// ws reads from body starting at startPosition until it finds a non-whitespace
+// or commented character, and updates the token end to include all whitespace
+func (s *Lexer) ws() {
+ for s.end < len(s.Input) {
+ switch s.Input[s.end] {
+ case '\t', ' ', ',':
+ s.end++
+ s.endRunes++
+ case '\n':
+ s.end++
+ s.endRunes++
+ s.line++
+ s.lineStartRunes = s.endRunes
+ case '\r':
+ s.end++
+ s.endRunes++
+ s.line++
+ s.lineStartRunes = s.endRunes
+ // skip the following newline if its there
+ if s.end < len(s.Input) && s.Input[s.end] == '\n' {
+ s.end++
+ s.endRunes++
+ }
+ // byte order mark, given ws is hot path we aren't relying on the unicode package here.
+ case 0xef:
+ if s.end+2 < len(s.Input) && s.Input[s.end+1] == 0xBB && s.Input[s.end+2] == 0xBF {
+ s.end += 3
+ s.endRunes++
+ } else {
+ return
+ }
+ default:
+ return
+ }
+ }
+}
+
+// readComment from the input
+//
+// #[\u0009\u0020-\uFFFF]*
+func (s *Lexer) readComment() (Token, *gqlerror.Error) {
+ for s.end < len(s.Input) {
+ r, w := s.peek()
+
+ // SourceCharacter but not LineTerminator
+ if r > 0x001f || r == '\t' {
+ s.end += w
+ s.endRunes++
+ } else {
+ break
+ }
+ }
+
+ return s.makeToken(Comment)
+}
+
+// readNumber from the input, either a float
+// or an int depending on whether a decimal point appears.
+//
+// Int: -?(0|[1-9][0-9]*)
+// Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
+func (s *Lexer) readNumber() (Token, *gqlerror.Error) {
+ float := false
+
+ // backup to the first digit
+ s.end--
+ s.endRunes--
+
+ s.acceptByte('-')
+
+ if s.acceptByte('0') {
+ if consumed := s.acceptDigits(); consumed != 0 {
+ s.end -= consumed
+ s.endRunes -= consumed
+ return s.makeError("Invalid number, unexpected digit after 0: %s.", s.describeNext())
+ }
+ } else {
+ if consumed := s.acceptDigits(); consumed == 0 {
+ return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+ }
+ }
+
+ if s.acceptByte('.') {
+ float = true
+
+ if consumed := s.acceptDigits(); consumed == 0 {
+ return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+ }
+ }
+
+ if s.acceptByte('e', 'E') {
+ float = true
+
+ s.acceptByte('-', '+')
+
+ if consumed := s.acceptDigits(); consumed == 0 {
+ return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+ }
+ }
+
+ if float {
+ return s.makeToken(Float)
+ } else {
+ return s.makeToken(Int)
+ }
+}
+
+// acceptByte if it matches any of given bytes, returning true if it found anything
+func (s *Lexer) acceptByte(bytes ...uint8) bool {
+ if s.end >= len(s.Input) {
+ return false
+ }
+
+ for _, accepted := range bytes {
+ if s.Input[s.end] == accepted {
+ s.end++
+ s.endRunes++
+ return true
+ }
+ }
+ return false
+}
+
+// acceptDigits from the input, returning the number of digits it found
+func (s *Lexer) acceptDigits() int {
+ consumed := 0
+ for s.end < len(s.Input) && s.Input[s.end] >= '0' && s.Input[s.end] <= '9' {
+ s.end++
+ s.endRunes++
+ consumed++
+ }
+
+ return consumed
+}
+
+// describeNext peeks at the input and returns a human readable string. This should will alloc
+// and should only be used in errors
+func (s *Lexer) describeNext() string {
+ if s.end < len(s.Input) {
+ return `"` + string(s.Input[s.end]) + `"`
+ }
+ return "<EOF>"
+}
+
+// readString from the input
+//
+// "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
+func (s *Lexer) readString() (Token, *gqlerror.Error) {
+ inputLen := len(s.Input)
+
+ // this buffer is lazily created only if there are escape characters.
+ var buf *bytes.Buffer
+
+ // skip the opening quote
+ s.start++
+ s.startRunes++
+
+ for s.end < inputLen {
+ r := s.Input[s.end]
+ if r == '\n' || r == '\r' {
+ break
+ }
+ if r < 0x0020 && r != '\t' {
+ return s.makeError(`Invalid character within String: "\u%04d".`, r)
+ }
+ switch r {
+ default:
+ var char = rune(r)
+ var w = 1
+
+ // skip unicode overhead if we are in the ascii range
+ if r >= 127 {
+ char, w = utf8.DecodeRuneInString(s.Input[s.end:])
+ }
+ s.end += w
+ s.endRunes++
+
+ if buf != nil {
+ buf.WriteRune(char)
+ }
+
+ case '"':
+ t, err := s.makeToken(String)
+ // the token should not include the quotes in its value, but should cover them in its position
+ t.Pos.Start--
+ t.Pos.End++
+
+ if buf != nil {
+ t.Value = buf.String()
+ }
+
+ // skip the close quote
+ s.end++
+ s.endRunes++
+
+ return t, err
+
+ case '\\':
+ if s.end+1 >= inputLen {
+ s.end++
+ s.endRunes++
+ return s.makeError(`Invalid character escape sequence.`)
+ }
+
+ if buf == nil {
+ buf = bytes.NewBufferString(s.Input[s.start:s.end])
+ }
+
+ escape := s.Input[s.end+1]
+
+ if escape == 'u' {
+ if s.end+6 >= inputLen {
+ s.end++
+ s.endRunes++
+ return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:])
+ }
+
+ r, ok := unhex(s.Input[s.end+2 : s.end+6])
+ if !ok {
+ s.end++
+ s.endRunes++
+ return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:s.end+5])
+ }
+ buf.WriteRune(r)
+ s.end += 6
+ s.endRunes += 6
+ } else {
+ switch escape {
+ case '"', '/', '\\':
+ buf.WriteByte(escape)
+ case 'b':
+ buf.WriteByte('\b')
+ case 'f':
+ buf.WriteByte('\f')
+ case 'n':
+ buf.WriteByte('\n')
+ case 'r':
+ buf.WriteByte('\r')
+ case 't':
+ buf.WriteByte('\t')
+ default:
+ s.end += 1
+ s.endRunes += 1
+ return s.makeError("Invalid character escape sequence: \\%s.", string(escape))
+ }
+ s.end += 2
+ s.endRunes += 2
+ }
+ }
+ }
+
+ return s.makeError("Unterminated string.")
+}
+
+// readBlockString from the input
+//
+// """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
+func (s *Lexer) readBlockString() (Token, *gqlerror.Error) {
+ inputLen := len(s.Input)
+
+ var buf bytes.Buffer
+
+ // skip the opening quote
+ s.start += 3
+ s.startRunes += 3
+ s.end += 2
+ s.endRunes += 2
+
+ for s.end < inputLen {
+ r := s.Input[s.end]
+
+ // Closing triple quote (""")
+ if r == '"' && s.end+3 <= inputLen && s.Input[s.end:s.end+3] == `"""` {
+ t, err := s.makeValueToken(BlockString, blockStringValue(buf.String()))
+
+ // the token should not include the quotes in its value, but should cover them in its position
+ t.Pos.Start -= 3
+ t.Pos.End += 3
+
+ // skip the close quote
+ s.end += 3
+ s.endRunes += 3
+
+ return t, err
+ }
+
+ // SourceCharacter
+ if r < 0x0020 && r != '\t' && r != '\n' && r != '\r' {
+ return s.makeError(`Invalid character within String: "\u%04d".`, r)
+ }
+
+ if r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""` {
+ buf.WriteString(`"""`)
+ s.end += 4
+ s.endRunes += 4
+ } else if r == '\r' {
+ if s.end+1 <= inputLen && s.Input[s.end+1] == '\n' {
+ s.end++
+ s.endRunes++
+ }
+
+ buf.WriteByte('\n')
+ s.end++
+ s.endRunes++
+ } else {
+ var char = rune(r)
+ var w = 1
+
+ // skip unicode overhead if we are in the ascii range
+ if r >= 127 {
+ char, w = utf8.DecodeRuneInString(s.Input[s.end:])
+ }
+ s.end += w
+ s.endRunes++
+ buf.WriteRune(char)
+ }
+ }
+
+ return s.makeError("Unterminated string.")
+}
+
+func unhex(b string) (v rune, ok bool) {
+ for _, c := range b {
+ v <<= 4
+ switch {
+ case '0' <= c && c <= '9':
+ v |= c - '0'
+ case 'a' <= c && c <= 'f':
+ v |= c - 'a' + 10
+ case 'A' <= c && c <= 'F':
+ v |= c - 'A' + 10
+ default:
+ return 0, false
+ }
+ }
+
+ return v, true
+}
+
+// readName from the input
+//
+// [_A-Za-z][_0-9A-Za-z]*
+func (s *Lexer) readName() (Token, *gqlerror.Error) {
+ for s.end < len(s.Input) {
+ r, w := s.peek()
+
+ if (r >= '0' && r <= '9') || (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' {
+ s.end += w
+ s.endRunes++
+ } else {
+ break
+ }
+ }
+
+ return s.makeToken(Name)
+}
diff --git a/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml b/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml
new file mode 100644
index 00000000..e2c26696
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml
@@ -0,0 +1,672 @@
+encoding:
+ - name: disallows uncommon control characters
+ input: "\u0007"
+ error:
+ message: 'Cannot contain the invalid character "\u0007"'
+ locations: [{line: 1, column: 1}]
+
+ - name: accepts BOM header
+ input: "\uFEFF foo"
+ tokens:
+ -
+ kind: NAME
+ start: 2
+ end: 5
+ value: 'foo'
+
+simple tokens:
+ - name: records line and column
+ input: "\n \r\n \r foo\n"
+ tokens:
+ -
+ kind: NAME
+ start: 8
+ end: 11
+ line: 4
+ column: 3
+ value: 'foo'
+
+ - name: skips whitespace
+ input: "\n\n foo\n\n\n"
+ tokens:
+ -
+ kind: NAME
+ start: 6
+ end: 9
+ value: 'foo'
+
+ - name: skips comments
+ input: "\n #comment\n foo#comment\n"
+ tokens:
+ -
+ kind: NAME
+ start: 18
+ end: 21
+ value: 'foo'
+
+ - name: skips commas
+ input: ",,,foo,,,"
+ tokens:
+ -
+ kind: NAME
+ start: 3
+ end: 6
+ value: 'foo'
+
+ - name: errors respect whitespace
+ input: "\n\n ?\n\n\n"
+ error:
+ message: 'Cannot parse the unexpected character "?".'
+ locations: [{line: 3, column: 5}]
+ string: |
+ Syntax Error: Cannot parse the unexpected character "?".
+ GraphQL request (3:5)
+ 2:
+ 3: ?
+ ^
+ 4:
+
+ - name: lex reports useful information for dashes in names
+ input: "a-b"
+ error:
+ message: 'Invalid number, expected digit but got: "b".'
+ locations: [{ line: 1, column: 3 }]
+ tokens:
+ -
+ kind: Name
+ start: 0
+ end: 1
+ value: a
+
+lexes strings:
+ - name: basic
+ input: '"simple"'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 8
+ value: 'simple'
+
+ - name: whitespace
+ input: '" white space "'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 15
+ value: ' white space '
+
+ - name: quote
+ input: '"quote \""'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 10
+ value: 'quote "'
+
+ - name: escaped
+ input: '"escaped \n\r\b\t\f"'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 20
+ value: "escaped \n\r\b\t\f"
+
+ - name: slashes
+ input: '"slashes \\ \/"'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 15
+ value: 'slashes \ /'
+
+ - name: unicode
+ input: '"unicode \u1234\u5678\u90AB\uCDEF"'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 34
+ value: "unicode \u1234\u5678\u90AB\uCDEF"
+
+lex reports useful string errors:
+ - name: unterminated
+ input: '"'
+ error:
+ message: "Unterminated string."
+ locations: [{ line: 1, column: 2 }]
+
+ - name: no end quote
+ input: '"no end quote'
+ error:
+ message: 'Unterminated string.'
+ locations: [{ line: 1, column: 14 }]
+
+ - name: single quotes
+ input: "'single quotes'"
+ error:
+ message: "Unexpected single quote character ('), did you mean to use a double quote (\")?"
+ locations: [{ line: 1, column: 1 }]
+
+ - name: control characters
+ input: "\"contains unescaped \u0007 control char\""
+ error:
+ message: 'Invalid character within String: "\u0007".'
+ locations: [{ line: 1, column: 21 }]
+
+ - name: null byte
+ input: "\"null-byte is not \u0000 end of file\""
+ error:
+ message: 'Invalid character within String: "\u0000".'
+ locations: [{ line: 1, column: 19 }]
+
+ - name: unterminated newline
+ input: "\"multi\nline\""
+ error:
+ message: 'Unterminated string.'
+ locations: [{line: 1, column: 7 }]
+
+ - name: unterminated carriage return
+ input: "\"multi\rline\""
+ error:
+ message: 'Unterminated string.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: bad escape character
+ input: '"bad \z esc"'
+ error:
+ message: 'Invalid character escape sequence: \z.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: hex escape sequence
+ input: '"bad \x esc"'
+ error:
+ message: 'Invalid character escape sequence: \x.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: short escape sequence
+ input: '"bad \u1 esc"'
+ error:
+ message: 'Invalid character escape sequence: \u1 es.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: invalid escape sequence 1
+ input: '"bad \u0XX1 esc"'
+ error:
+ message: 'Invalid character escape sequence: \u0XX1.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: invalid escape sequence 2
+ input: '"bad \uXXXX esc"'
+ error:
+ message: 'Invalid character escape sequence: \uXXXX.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: invalid escape sequence 3
+ input: '"bad \uFXXX esc"'
+ error:
+ message: 'Invalid character escape sequence: \uFXXX.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: invalid character escape sequence
+ input: '"bad \uXXXF esc"'
+ error:
+ message: 'Invalid character escape sequence: \uXXXF.'
+ locations: [{ line: 1, column: 7 }]
+
+lexes block strings:
+ - name: simple
+ input: '"""simple"""'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 12
+ value: 'simple'
+
+ - name: white space
+ input: '""" white space """'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 19
+ value: ' white space '
+
+ - name: contains quote
+ input: '"""contains " quote"""'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 22
+ value: 'contains " quote'
+
+ - name: contains triplequote
+ input: "\"\"\"contains \\\"\"\" triplequote\"\"\""
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 31
+ value: 'contains """ triplequote'
+
+ - name: multi line
+ input: "\"\"\"multi\nline\"\"\""
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 16
+ value: "multi\nline"
+
+ - name: multi line normalized
+ input: "\"\"\"multi\rline\r\nnormalized\"\"\""
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 28
+ value: "multi\nline\nnormalized"
+
+ - name: unescaped
+ input: '"""unescaped \n\r\b\t\f\u1234"""'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 32
+ value: 'unescaped \n\r\b\t\f\u1234'
+
+ - name: slashes
+ input: '"""slashes \\ \/"""'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 19
+ value: 'slashes \\ \/'
+
+ - name: multiple lines
+ input: |
+ """
+
+ spans
+ multiple
+ lines
+
+ """
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 36
+ value: "spans\n multiple\n lines"
+
+lex reports useful block string errors:
+ - name: unterminated string
+ input: '"""'
+ error:
+ message: "Unterminated string."
+ locations: [{ line: 1, column: 4 }]
+
+ - name: unescaped control characters
+ input: "\"\"\"contains unescaped \u0007 control char\"\"\""
+ error:
+ message: 'Invalid character within String: "\u0007".'
+ locations: [{ line: 1, column: 23 }]
+
+ - name: null byte
+ input: "\"\"\"null-byte is not \u0000 end of file\"\"\""
+ error:
+ message: 'Invalid character within String: "\u0000".'
+ locations: [{ line: 1, column: 21 }]
+
+lexes numbers:
+ - name: integer
+ input: "4"
+ tokens:
+ -
+ kind: INT
+ start: 0
+ end: 1
+ value: '4'
+
+ - name: float
+ input: "4.123"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 5
+ value: '4.123'
+
+ - name: negative
+ input: "-4"
+ tokens:
+ -
+ kind: INT
+ start: 0
+ end: 2
+ value: '-4'
+
+ - name: nine
+ input: "9"
+ tokens:
+ -
+ kind: INT
+ start: 0
+ end: 1
+ value: '9'
+
+ - name: zero
+ input: "0"
+ tokens:
+ -
+ kind: INT
+ start: 0
+ end: 1
+ value: '0'
+
+ - name: negative float
+ input: "-4.123"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 6
+ value: '-4.123'
+
+ - name: float leading zero
+ input: "0.123"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 5
+ value: '0.123'
+
+ - name: exponent whole
+ input: "123e4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 5
+ value: '123e4'
+
+ - name: exponent uppercase
+ input: "123E4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 5
+ value: '123E4'
+
+ - name: exponent negative power
+ input: "123e-4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 6
+ value: '123e-4'
+
+ - name: exponent positive power
+ input: "123e+4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 6
+ value: '123e+4'
+
+ - name: exponent negative base
+ input: "-1.123e4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 8
+ value: '-1.123e4'
+
+ - name: exponent negative base upper
+ input: "-1.123E4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 8
+ value: '-1.123E4'
+
+ - name: exponent negative base negative power
+ input: "-1.123e-4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 9
+ value: '-1.123e-4'
+
+ - name: exponent negative base positive power
+ input: "-1.123e+4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 9
+ value: '-1.123e+4'
+
+ - name: exponent negative base large power
+ input: "-1.123e4567"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 11
+ value: '-1.123e4567'
+
+lex reports useful number errors:
+ - name: zero
+ input: "00"
+ error:
+ message: 'Invalid number, unexpected digit after 0: "0".'
+ locations: [{ line: 1, column: 2 }]
+
+ - name: positive
+ input: "+1"
+ error:
+ message: 'Cannot parse the unexpected character "+".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: trailing dot
+ input: "1."
+ error:
+ message: 'Invalid number, expected digit but got: <EOF>.'
+ locations: [{ line: 1, column: 3 }]
+
+ - name: traililng dot exponent
+ input: "1.e1"
+ error:
+ message: 'Invalid number, expected digit but got: "e".'
+ locations: [{ line: 1, column: 3 }]
+
+ - name: missing leading zero
+ input: ".123"
+ error:
+ message: 'Cannot parse the unexpected character ".".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: characters
+ input: "1.A"
+ error:
+ message: 'Invalid number, expected digit but got: "A".'
+ locations: [{ line: 1, column: 3 }]
+
+ - name: negative characters
+ input: "-A"
+ error:
+ message: 'Invalid number, expected digit but got: "A".'
+ locations: [{ line: 1, column: 2 }]
+
+ - name: missing exponent
+ input: '1.0e'
+ error:
+ message: 'Invalid number, expected digit but got: <EOF>.'
+ locations: [{ line: 1, column: 5 }]
+
+ - name: character exponent
+ input: "1.0eA"
+ error:
+ message: 'Invalid number, expected digit but got: "A".'
+ locations: [{ line: 1, column: 5 }]
+
+lexes punctuation:
+ - name: bang
+ input: "!"
+ tokens:
+ -
+ kind: BANG
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: dollar
+ input: "$"
+ tokens:
+ -
+ kind: DOLLAR
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: open paren
+ input: "("
+ tokens:
+ -
+ kind: PAREN_L
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: close paren
+ input: ")"
+ tokens:
+ -
+ kind: PAREN_R
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: spread
+ input: "..."
+ tokens:
+ -
+ kind: SPREAD
+ start: 0
+ end: 3
+ value: undefined
+
+ - name: colon
+ input: ":"
+ tokens:
+ -
+ kind: COLON
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: equals
+ input: "="
+ tokens:
+ -
+ kind: EQUALS
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: at
+ input: "@"
+ tokens:
+ -
+ kind: AT
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: open bracket
+ input: "["
+ tokens:
+ -
+ kind: BRACKET_L
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: close bracket
+ input: "]"
+ tokens:
+ -
+ kind: BRACKET_R
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: open brace
+ input: "{"
+ tokens:
+ -
+ kind: BRACE_L
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: close brace
+ input: "}"
+ tokens:
+ -
+ kind: BRACE_R
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: pipe
+ input: "|"
+ tokens:
+ -
+ kind: PIPE
+ start: 0
+ end: 1
+ value: undefined
+
+lex reports useful unknown character error:
+ - name: not a spread
+ input: ".."
+ error:
+ message: 'Cannot parse the unexpected character ".".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: question mark
+ input: "?"
+ error:
+ message: 'Cannot parse the unexpected character "?".'
+ message: 'Cannot parse the unexpected character "?".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: unicode 203
+ input: "\u203B"
+ error:
+ message: 'Cannot parse the unexpected character "â".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: unicode 200
+ input: "\u200b"
+ error:
+ message: 'Cannot parse the unexpected character "â".'
+ locations: [{ line: 1, column: 1 }]
+
diff --git a/vendor/github.com/vektah/gqlparser/lexer/token.go b/vendor/github.com/vektah/gqlparser/lexer/token.go
new file mode 100644
index 00000000..aef8b729
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/lexer/token.go
@@ -0,0 +1,148 @@
+package lexer
+
+import (
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+const (
+ Invalid Type = iota
+ EOF
+ Bang
+ Dollar
+ Amp
+ ParenL
+ ParenR
+ Spread
+ Colon
+ Equals
+ At
+ BracketL
+ BracketR
+ BraceL
+ BraceR
+ Pipe
+ Name
+ Int
+ Float
+ String
+ BlockString
+ Comment
+)
+
+func (t Type) Name() string {
+ switch t {
+ case Invalid:
+ return "Invalid"
+ case EOF:
+ return "EOF"
+ case Bang:
+ return "Bang"
+ case Dollar:
+ return "Dollar"
+ case Amp:
+ return "Amp"
+ case ParenL:
+ return "ParenL"
+ case ParenR:
+ return "ParenR"
+ case Spread:
+ return "Spread"
+ case Colon:
+ return "Colon"
+ case Equals:
+ return "Equals"
+ case At:
+ return "At"
+ case BracketL:
+ return "BracketL"
+ case BracketR:
+ return "BracketR"
+ case BraceL:
+ return "BraceL"
+ case BraceR:
+ return "BraceR"
+ case Pipe:
+ return "Pipe"
+ case Name:
+ return "Name"
+ case Int:
+ return "Int"
+ case Float:
+ return "Float"
+ case String:
+ return "String"
+ case BlockString:
+ return "BlockString"
+ case Comment:
+ return "Comment"
+ }
+ return "Unknown " + strconv.Itoa(int(t))
+}
+
+func (t Type) String() string {
+ switch t {
+ case Invalid:
+ return "<Invalid>"
+ case EOF:
+ return "<EOF>"
+ case Bang:
+ return "!"
+ case Dollar:
+ return "$"
+ case Amp:
+ return "&"
+ case ParenL:
+ return "("
+ case ParenR:
+ return ")"
+ case Spread:
+ return "..."
+ case Colon:
+ return ":"
+ case Equals:
+ return "="
+ case At:
+ return "@"
+ case BracketL:
+ return "["
+ case BracketR:
+ return "]"
+ case BraceL:
+ return "{"
+ case BraceR:
+ return "}"
+ case Pipe:
+ return "|"
+ case Name:
+ return "Name"
+ case Int:
+ return "Int"
+ case Float:
+ return "Float"
+ case String:
+ return "String"
+ case BlockString:
+ return "BlockString"
+ case Comment:
+ return "Comment"
+ }
+ return "Unknown " + strconv.Itoa(int(t))
+}
+
+// Kind represents a type of token. The types are predefined as constants.
+type Type int
+
+type Token struct {
+ Kind Type // The token type.
+ Value string // The literal value consumed.
+ Pos ast.Position // The file and line this token was read from
+}
+
+func (t Token) String() string {
+ if t.Value != "" {
+ return t.Kind.String() + " " + strconv.Quote(t.Value)
+ }
+ return t.Kind.String()
+}
diff --git a/vendor/github.com/vektah/gqlparser/parser/parser.go b/vendor/github.com/vektah/gqlparser/parser/parser.go
new file mode 100644
index 00000000..f3648cb3
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/parser.go
@@ -0,0 +1,112 @@
+package parser
+
+import (
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/lexer"
+)
+
+type parser struct {
+ lexer lexer.Lexer
+ err *gqlerror.Error
+
+ peeked bool
+ peekToken lexer.Token
+ peekError *gqlerror.Error
+
+ prev lexer.Token
+}
+
+func (p *parser) peekPos() *ast.Position {
+ if p.err != nil {
+ return nil
+ }
+
+ peek := p.peek()
+ return &peek.Pos
+}
+
+func (p *parser) peek() lexer.Token {
+ if p.err != nil {
+ return p.prev
+ }
+
+ if !p.peeked {
+ p.peekToken, p.peekError = p.lexer.ReadToken()
+ p.peeked = true
+ }
+
+ return p.peekToken
+}
+
+func (p *parser) error(tok lexer.Token, format string, args ...interface{}) {
+ if p.err != nil {
+ return
+ }
+ p.err = gqlerror.ErrorLocf(tok.Pos.Src.Name, tok.Pos.Line, tok.Pos.Column, format, args...)
+}
+
+func (p *parser) next() lexer.Token {
+ if p.err != nil {
+ return p.prev
+ }
+ if p.peeked {
+ p.peeked = false
+ p.prev, p.err = p.peekToken, p.peekError
+ } else {
+ p.prev, p.err = p.lexer.ReadToken()
+ }
+ return p.prev
+}
+
+func (p *parser) expectKeyword(value string) lexer.Token {
+ tok := p.peek()
+ if tok.Kind == lexer.Name && tok.Value == value {
+ return p.next()
+ }
+
+ p.error(tok, "Expected %s, found %s", strconv.Quote(value), tok.String())
+ return tok
+}
+
+func (p *parser) expect(kind lexer.Type) lexer.Token {
+ tok := p.peek()
+ if tok.Kind == kind {
+ return p.next()
+ }
+
+ p.error(tok, "Expected %s, found %s", kind, tok.Kind.String())
+ return tok
+}
+
+func (p *parser) skip(kind lexer.Type) bool {
+ tok := p.peek()
+
+ if tok.Kind != kind {
+ return false
+ }
+ p.next()
+ return true
+}
+
+func (p *parser) unexpectedError() {
+ p.unexpectedToken(p.peek())
+}
+
+func (p *parser) unexpectedToken(tok lexer.Token) {
+ p.error(tok, "Unexpected %s", tok.String())
+}
+
+func (p *parser) many(start lexer.Type, end lexer.Type, cb func()) {
+ hasDef := p.skip(start)
+ if !hasDef {
+ return
+ }
+
+ for p.peek().Kind != end && p.err == nil {
+ cb()
+ }
+ p.next()
+}
diff --git a/vendor/github.com/vektah/gqlparser/parser/query.go b/vendor/github.com/vektah/gqlparser/parser/query.go
new file mode 100644
index 00000000..7fecb57f
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/query.go
@@ -0,0 +1,334 @@
+package parser
+
+import (
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/lexer"
+
+ . "github.com/vektah/gqlparser/ast"
+)
+
+func ParseQuery(source *Source) (*QueryDocument, *gqlerror.Error) {
+ p := parser{
+ lexer: lexer.New(source),
+ }
+ return p.parseQueryDocument(), p.err
+}
+
+func (p *parser) parseQueryDocument() *QueryDocument {
+ var doc QueryDocument
+ for p.peek().Kind != lexer.EOF {
+ if p.err != nil {
+ return &doc
+ }
+ doc.Position = p.peekPos()
+ switch p.peek().Kind {
+ case lexer.Name:
+ switch p.peek().Value {
+ case "query", "mutation", "subscription":
+ doc.Operations = append(doc.Operations, p.parseOperationDefinition())
+ case "fragment":
+ doc.Fragments = append(doc.Fragments, p.parseFragmentDefinition())
+ default:
+ p.unexpectedError()
+ }
+ case lexer.BraceL:
+ doc.Operations = append(doc.Operations, p.parseOperationDefinition())
+ default:
+ p.unexpectedError()
+ }
+ }
+
+ return &doc
+}
+
+func (p *parser) parseOperationDefinition() *OperationDefinition {
+ if p.peek().Kind == lexer.BraceL {
+ return &OperationDefinition{
+ Position: p.peekPos(),
+ Operation: Query,
+ SelectionSet: p.parseSelectionSet(),
+ }
+ }
+
+ var od OperationDefinition
+ od.Position = p.peekPos()
+ od.Operation = p.parseOperationType()
+
+ if p.peek().Kind == lexer.Name {
+ od.Name = p.next().Value
+ }
+
+ od.VariableDefinitions = p.parseVariableDefinitions()
+ od.Directives = p.parseDirectives(false)
+ od.SelectionSet = p.parseSelectionSet()
+
+ return &od
+}
+
+func (p *parser) parseOperationType() Operation {
+ tok := p.next()
+ switch tok.Value {
+ case "query":
+ return Query
+ case "mutation":
+ return Mutation
+ case "subscription":
+ return Subscription
+ }
+ p.unexpectedToken(tok)
+ return ""
+}
+
+func (p *parser) parseVariableDefinitions() VariableDefinitionList {
+ var defs []*VariableDefinition
+ p.many(lexer.ParenL, lexer.ParenR, func() {
+ defs = append(defs, p.parseVariableDefinition())
+ })
+
+ return defs
+}
+
+func (p *parser) parseVariableDefinition() *VariableDefinition {
+ var def VariableDefinition
+ def.Position = p.peekPos()
+ def.Variable = p.parseVariable()
+
+ p.expect(lexer.Colon)
+
+ def.Type = p.parseTypeReference()
+
+ if p.skip(lexer.Equals) {
+ def.DefaultValue = p.parseValueLiteral(true)
+ }
+
+ return &def
+}
+
+func (p *parser) parseVariable() string {
+ p.expect(lexer.Dollar)
+ return p.parseName()
+}
+
+func (p *parser) parseSelectionSet() SelectionSet {
+ var selections []Selection
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ selections = append(selections, p.parseSelection())
+ })
+
+ return SelectionSet(selections)
+}
+
+func (p *parser) parseSelection() Selection {
+ if p.peek().Kind == lexer.Spread {
+ return p.parseFragment()
+ }
+ return p.parseField()
+}
+
+func (p *parser) parseField() *Field {
+ var field Field
+ field.Position = p.peekPos()
+ field.Alias = p.parseName()
+
+ if p.skip(lexer.Colon) {
+ field.Name = p.parseName()
+ } else {
+ field.Name = field.Alias
+ }
+
+ field.Arguments = p.parseArguments(false)
+ field.Directives = p.parseDirectives(false)
+ if p.peek().Kind == lexer.BraceL {
+ field.SelectionSet = p.parseSelectionSet()
+ }
+
+ return &field
+}
+
+func (p *parser) parseArguments(isConst bool) ArgumentList {
+ var arguments ArgumentList
+ p.many(lexer.ParenL, lexer.ParenR, func() {
+ arguments = append(arguments, p.parseArgument(isConst))
+ })
+
+ return arguments
+}
+
+func (p *parser) parseArgument(isConst bool) *Argument {
+ arg := Argument{}
+ arg.Position = p.peekPos()
+ arg.Name = p.parseName()
+ p.expect(lexer.Colon)
+
+ arg.Value = p.parseValueLiteral(isConst)
+ return &arg
+}
+
+func (p *parser) parseFragment() Selection {
+ p.expect(lexer.Spread)
+
+ if peek := p.peek(); peek.Kind == lexer.Name && peek.Value != "on" {
+ return &FragmentSpread{
+ Position: p.peekPos(),
+ Name: p.parseFragmentName(),
+ Directives: p.parseDirectives(false),
+ }
+ }
+
+ var def InlineFragment
+ def.Position = p.peekPos()
+ if p.peek().Value == "on" {
+ p.next() // "on"
+
+ def.TypeCondition = p.parseName()
+ }
+
+ def.Directives = p.parseDirectives(false)
+ def.SelectionSet = p.parseSelectionSet()
+ return &def
+}
+
+func (p *parser) parseFragmentDefinition() *FragmentDefinition {
+ var def FragmentDefinition
+ def.Position = p.peekPos()
+ p.expectKeyword("fragment")
+
+ def.Name = p.parseFragmentName()
+ def.VariableDefinition = p.parseVariableDefinitions()
+
+ p.expectKeyword("on")
+
+ def.TypeCondition = p.parseName()
+ def.Directives = p.parseDirectives(false)
+ def.SelectionSet = p.parseSelectionSet()
+ return &def
+}
+
+func (p *parser) parseFragmentName() string {
+ if p.peek().Value == "on" {
+ p.unexpectedError()
+ return ""
+ }
+
+ return p.parseName()
+}
+
+func (p *parser) parseValueLiteral(isConst bool) *Value {
+ token := p.peek()
+
+ var kind ValueKind
+ switch token.Kind {
+ case lexer.BracketL:
+ return p.parseList(isConst)
+ case lexer.BraceL:
+ return p.parseObject(isConst)
+ case lexer.Dollar:
+ if isConst {
+ p.unexpectedError()
+ return nil
+ }
+ return &Value{Position: &token.Pos, Raw: p.parseVariable(), Kind: Variable}
+ case lexer.Int:
+ kind = IntValue
+ case lexer.Float:
+ kind = FloatValue
+ case lexer.String:
+ kind = StringValue
+ case lexer.BlockString:
+ kind = BlockValue
+ case lexer.Name:
+ switch token.Value {
+ case "true", "false":
+ kind = BooleanValue
+ case "null":
+ kind = NullValue
+ default:
+ kind = EnumValue
+ }
+ default:
+ p.unexpectedError()
+ return nil
+ }
+
+ p.next()
+
+ return &Value{Position: &token.Pos, Raw: token.Value, Kind: kind}
+}
+
+func (p *parser) parseList(isConst bool) *Value {
+ var values ChildValueList
+ pos := p.peekPos()
+ p.many(lexer.BracketL, lexer.BracketR, func() {
+ values = append(values, &ChildValue{Value: p.parseValueLiteral(isConst)})
+ })
+
+ return &Value{Children: values, Kind: ListValue, Position: pos}
+}
+
+func (p *parser) parseObject(isConst bool) *Value {
+ var fields ChildValueList
+ pos := p.peekPos()
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ fields = append(fields, p.parseObjectField(isConst))
+ })
+
+ return &Value{Children: fields, Kind: ObjectValue, Position: pos}
+}
+
+func (p *parser) parseObjectField(isConst bool) *ChildValue {
+ field := ChildValue{}
+ field.Position = p.peekPos()
+ field.Name = p.parseName()
+
+ p.expect(lexer.Colon)
+
+ field.Value = p.parseValueLiteral(isConst)
+ return &field
+}
+
+func (p *parser) parseDirectives(isConst bool) []*Directive {
+ var directives []*Directive
+
+ for p.peek().Kind == lexer.At {
+ if p.err != nil {
+ break
+ }
+ directives = append(directives, p.parseDirective(isConst))
+ }
+ return directives
+}
+
+func (p *parser) parseDirective(isConst bool) *Directive {
+ p.expect(lexer.At)
+
+ return &Directive{
+ Position: p.peekPos(),
+ Name: p.parseName(),
+ Arguments: p.parseArguments(isConst),
+ }
+}
+
+func (p *parser) parseTypeReference() *Type {
+ var typ Type
+
+ if p.skip(lexer.BracketL) {
+ typ.Position = p.peekPos()
+ typ.Elem = p.parseTypeReference()
+ p.expect(lexer.BracketR)
+ } else {
+ typ.Position = p.peekPos()
+ typ.NamedType = p.parseName()
+ }
+
+ if p.skip(lexer.Bang) {
+ typ.Position = p.peekPos()
+ typ.NonNull = true
+ }
+ return &typ
+}
+
+func (p *parser) parseName() string {
+ token := p.expect(lexer.Name)
+
+ return token.Value
+}
diff --git a/vendor/github.com/vektah/gqlparser/parser/query_test.yml b/vendor/github.com/vektah/gqlparser/parser/query_test.yml
new file mode 100644
index 00000000..f392eb8e
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/query_test.yml
@@ -0,0 +1,507 @@
+parser provides useful errors:
+ - name: unclosed paren
+ input: '{'
+ error:
+ message: "Expected Name, found <EOF>"
+ locations: [{line: 1, column: 2}]
+
+ - name: missing on in fragment
+ input: |
+ { ...MissingOn }
+ fragment MissingOn Type
+ error:
+ message: 'Expected "on", found Name "Type"'
+ locations: [{ line: 2, column: 20 }]
+
+ - name: missing name after alias
+ input: '{ field: {} }'
+ error:
+ message: "Expected Name, found {"
+ locations: [{ line: 1, column: 10 }]
+
+ - name: not an operation
+ input: 'notanoperation Foo { field }'
+ error:
+ message: 'Unexpected Name "notanoperation"'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: a wild splat appears
+ input: '...'
+ error:
+ message: 'Unexpected ...'
+ locations: [{ line: 1, column: 1}]
+
+variables:
+ - name: are allowed in args
+ input: '{ field(complex: { a: { b: [ $var ] } }) }'
+
+ - name: are not allowed in default args
+ input: 'query Foo($x: Complex = { a: { b: [ $var ] } }) { field }'
+ error:
+ message: 'Unexpected $'
+ locations: [{ line: 1, column: 37 }]
+
+fragments:
+ - name: can not be named 'on'
+ input: 'fragment on on on { on }'
+ error:
+ message: 'Unexpected Name "on"'
+ locations: [{ line: 1, column: 10 }]
+
+ - name: can not spread fragments called 'on'
+ input: '{ ...on }'
+ error:
+ message: 'Expected Name, found }'
+ locations: [{ line: 1, column: 9 }]
+
+encoding:
+ - name: multibyte characters are supported
+ input: |
+ # This comment has a ਊ multi-byte character.
+ { field(arg: "Has a ਊ multi-byte character.") }
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "field"
+ Name: "field"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "arg"
+ Value: "Has a ਊ multi-byte character."
+
+keywords are allowed anywhere a name is:
+ - name: on
+ input: |
+ query on {
+ ... a
+ ... on on { field }
+ }
+ fragment a on Type {
+ on(on: $on)
+ @on(on: on)
+ }
+
+ - name: subscription
+ input: |
+ query subscription {
+ ... subscription
+ ... on subscription { field }
+ }
+ fragment subscription on Type {
+ subscription(subscription: $subscription)
+ @subscription(subscription: subscription)
+ }
+
+ - name: true
+ input: |
+ query true {
+ ... true
+ ... on true { field }
+ }
+ fragment true on Type {
+ true(true: $true)
+ @true(true: true)
+ }
+
+operations:
+ - name: anonymous mutation
+ input: 'mutation { mutationField }'
+
+ - name: named mutation
+ input: 'mutation Foo { mutationField }'
+
+ - name: anonymous subscription
+ input: 'subscription { subscriptionField }'
+
+ - name: named subscription
+ input: 'subscription Foo { subscriptionField }'
+
+
+ast:
+ - name: simple query
+ input: |
+ {
+ node(id: 4) {
+ id,
+ name
+ }
+ }
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "node"
+ Name: "node"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "id"
+ Value: 4
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <Field>
+ Alias: "name"
+ Name: "name"
+
+ - name: nameless query with no variables
+ input: |
+ query {
+ node {
+ id
+ }
+ }
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "node"
+ Name: "node"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+
+ - name: fragment defined variables
+ input: 'fragment a($v: Boolean = false) on t { f(v: $v) }'
+ ast: |
+ <QueryDocument>
+ Fragments: [FragmentDefinition]
+ - <FragmentDefinition>
+ Name: "a"
+ VariableDefinition: [VariableDefinition]
+ - <VariableDefinition>
+ Variable: "v"
+ Type: Boolean
+ DefaultValue: false
+ TypeCondition: "t"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "v"
+ Value: $v
+
+
+values:
+ - name: null
+ input: '{ f(id: null) }'
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "id"
+ Value: null
+
+ - name: strings
+ input: '{ f(long: """long""", short: "short") } '
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "long"
+ Value: "long"
+ - <Argument>
+ Name: "short"
+ Value: "short"
+
+ - name: list
+ input: '{ f(id: [1,2]) }'
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "id"
+ Value: [1,2]
+
+types:
+ - name: common types
+ input: 'query ($string: String, $int: Int, $arr: [Arr], $notnull: [Arr!]!) { f }'
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ VariableDefinitions: [VariableDefinition]
+ - <VariableDefinition>
+ Variable: "string"
+ Type: String
+ - <VariableDefinition>
+ Variable: "int"
+ Type: Int
+ - <VariableDefinition>
+ Variable: "arr"
+ Type: [Arr]
+ - <VariableDefinition>
+ Variable: "notnull"
+ Type: [Arr!]!
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+
+large queries:
+ - name: kitchen sink
+ input: |
+ # Copyright (c) 2015-present, Facebook, Inc.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ query queryName($foo: ComplexType, $site: Site = MOBILE) {
+ whoever123is: node(id: [123, 456]) {
+ id ,
+ ... on User @defer {
+ field2 {
+ id ,
+ alias: field1(first:10, after:$foo,) @include(if: $foo) {
+ id,
+ ...frag
+ }
+ }
+ }
+ ... @skip(unless: $foo) {
+ id
+ }
+ ... {
+ id
+ }
+ }
+ }
+
+ mutation likeStory {
+ like(story: 123) @defer {
+ story {
+ id
+ }
+ }
+ }
+
+ subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) {
+ storyLikeSubscribe(input: $input) {
+ story {
+ likers {
+ count
+ }
+ likeSentence {
+ text
+ }
+ }
+ }
+ }
+
+ fragment frag on Friend {
+ foo(size: $size, bar: $b, obj: {key: "value", block: """
+ block string uses \"""
+ """})
+ }
+
+ {
+ unnamed(truthy: true, falsey: false, nullish: null),
+ query
+ }
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ Name: "queryName"
+ VariableDefinitions: [VariableDefinition]
+ - <VariableDefinition>
+ Variable: "foo"
+ Type: ComplexType
+ - <VariableDefinition>
+ Variable: "site"
+ Type: Site
+ DefaultValue: MOBILE
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "whoever123is"
+ Name: "node"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "id"
+ Value: [123,456]
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <InlineFragment>
+ TypeCondition: "User"
+ Directives: [Directive]
+ - <Directive>
+ Name: "defer"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "field2"
+ Name: "field2"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <Field>
+ Alias: "alias"
+ Name: "field1"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "first"
+ Value: 10
+ - <Argument>
+ Name: "after"
+ Value: $foo
+ Directives: [Directive]
+ - <Directive>
+ Name: "include"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "if"
+ Value: $foo
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <FragmentSpread>
+ Name: "frag"
+ - <InlineFragment>
+ Directives: [Directive]
+ - <Directive>
+ Name: "skip"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "unless"
+ Value: $foo
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <InlineFragment>
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <OperationDefinition>
+ Operation: Operation("mutation")
+ Name: "likeStory"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "like"
+ Name: "like"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "story"
+ Value: 123
+ Directives: [Directive]
+ - <Directive>
+ Name: "defer"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "story"
+ Name: "story"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <OperationDefinition>
+ Operation: Operation("subscription")
+ Name: "StoryLikeSubscription"
+ VariableDefinitions: [VariableDefinition]
+ - <VariableDefinition>
+ Variable: "input"
+ Type: StoryLikeSubscribeInput
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "storyLikeSubscribe"
+ Name: "storyLikeSubscribe"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "input"
+ Value: $input
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "story"
+ Name: "story"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "likers"
+ Name: "likers"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "count"
+ Name: "count"
+ - <Field>
+ Alias: "likeSentence"
+ Name: "likeSentence"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "text"
+ Name: "text"
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "unnamed"
+ Name: "unnamed"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "truthy"
+ Value: true
+ - <Argument>
+ Name: "falsey"
+ Value: false
+ - <Argument>
+ Name: "nullish"
+ Value: null
+ - <Field>
+ Alias: "query"
+ Name: "query"
+ Fragments: [FragmentDefinition]
+ - <FragmentDefinition>
+ Name: "frag"
+ TypeCondition: "Friend"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "foo"
+ Name: "foo"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "size"
+ Value: $size
+ - <Argument>
+ Name: "bar"
+ Value: $b
+ - <Argument>
+ Name: "obj"
+ Value: {"key":"value","block":"block string uses \"\"\""}
diff --git a/vendor/github.com/vektah/gqlparser/parser/schema.go b/vendor/github.com/vektah/gqlparser/parser/schema.go
new file mode 100644
index 00000000..f409f1f4
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/schema.go
@@ -0,0 +1,503 @@
+package parser
+
+import (
+ . "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/lexer"
+)
+
+func ParseSchema(source *Source) (*SchemaDocument, *gqlerror.Error) {
+ p := parser{
+ lexer: lexer.New(source),
+ }
+ return p.parseSchemaDocument(), p.err
+}
+
+func (p *parser) parseSchemaDocument() *SchemaDocument {
+ var doc SchemaDocument
+ doc.Position = p.peekPos()
+ for p.peek().Kind != lexer.EOF {
+ if p.err != nil {
+ return nil
+ }
+
+ var description string
+ if p.peek().Kind == lexer.BlockString || p.peek().Kind == lexer.String {
+ description = p.parseDescription()
+ }
+
+ if p.peek().Kind != lexer.Name {
+ p.unexpectedError()
+ break
+ }
+
+ switch p.peek().Value {
+ case "scalar", "type", "interface", "union", "enum", "input":
+ doc.Definitions = append(doc.Definitions, p.parseTypeSystemDefinition(description))
+ case "schema":
+ doc.Schema = append(doc.Schema, p.parseSchemaDefinition(description))
+ case "directive":
+ doc.Directives = append(doc.Directives, p.parseDirectiveDefinition(description))
+ case "extend":
+ if description != "" {
+ p.unexpectedToken(p.prev)
+ }
+ p.parseTypeSystemExtension(&doc)
+ default:
+ p.unexpectedError()
+ return nil
+ }
+ }
+
+ return &doc
+}
+
+func (p *parser) parseDescription() string {
+ token := p.peek()
+
+ if token.Kind != lexer.BlockString && token.Kind != lexer.String {
+ return ""
+ }
+
+ return p.next().Value
+}
+
+func (p *parser) parseTypeSystemDefinition(description string) *Definition {
+ tok := p.peek()
+ if tok.Kind != lexer.Name {
+ p.unexpectedError()
+ return nil
+ }
+
+ switch tok.Value {
+ case "scalar":
+ return p.parseScalarTypeDefinition(description)
+ case "type":
+ return p.parseObjectTypeDefinition(description)
+ case "interface":
+ return p.parseInterfaceTypeDefinition(description)
+ case "union":
+ return p.parseUnionTypeDefinition(description)
+ case "enum":
+ return p.parseEnumTypeDefinition(description)
+ case "input":
+ return p.parseInputObjectTypeDefinition(description)
+ default:
+ p.unexpectedError()
+ return nil
+ }
+}
+
+func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition {
+ p.expectKeyword("schema")
+
+ def := SchemaDefinition{Description: description}
+ def.Position = p.peekPos()
+ def.Description = description
+ def.Directives = p.parseDirectives(true)
+
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition())
+ })
+ return &def
+}
+
+func (p *parser) parseOperationTypeDefinition() *OperationTypeDefinition {
+ var op OperationTypeDefinition
+ op.Position = p.peekPos()
+ op.Operation = p.parseOperationType()
+ p.expect(lexer.Colon)
+ op.Type = p.parseName()
+ return &op
+}
+
+func (p *parser) parseScalarTypeDefinition(description string) *Definition {
+ p.expectKeyword("scalar")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Scalar
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ return &def
+}
+
+func (p *parser) parseObjectTypeDefinition(description string) *Definition {
+ p.expectKeyword("type")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Object
+ def.Description = description
+ def.Name = p.parseName()
+ def.Interfaces = p.parseImplementsInterfaces()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseFieldsDefinition()
+ return &def
+}
+
+func (p *parser) parseImplementsInterfaces() []string {
+ var types []string
+ if p.peek().Value == "implements" {
+ p.next()
+ // optional leading ampersand
+ p.skip(lexer.Amp)
+
+ types = append(types, p.parseName())
+ for p.skip(lexer.Amp) && p.err == nil {
+ types = append(types, p.parseName())
+ }
+ }
+ return types
+}
+
+func (p *parser) parseFieldsDefinition() FieldList {
+ var defs FieldList
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ defs = append(defs, p.parseFieldDefinition())
+ })
+ return defs
+}
+
+func (p *parser) parseFieldDefinition() *FieldDefinition {
+ var def FieldDefinition
+ def.Position = p.peekPos()
+ def.Description = p.parseDescription()
+ def.Name = p.parseName()
+ def.Arguments = p.parseArgumentDefs()
+ p.expect(lexer.Colon)
+ def.Type = p.parseTypeReference()
+ def.Directives = p.parseDirectives(true)
+
+ return &def
+}
+
+func (p *parser) parseArgumentDefs() ArgumentDefinitionList {
+ var args ArgumentDefinitionList
+ p.many(lexer.ParenL, lexer.ParenR, func() {
+ args = append(args, p.parseArgumentDef())
+ })
+ return args
+}
+
+func (p *parser) parseArgumentDef() *ArgumentDefinition {
+ var def ArgumentDefinition
+ def.Position = p.peekPos()
+ def.Description = p.parseDescription()
+ def.Name = p.parseName()
+ p.expect(lexer.Colon)
+ def.Type = p.parseTypeReference()
+ if p.skip(lexer.Equals) {
+ def.DefaultValue = p.parseValueLiteral(true)
+ }
+ def.Directives = p.parseDirectives(true)
+ return &def
+}
+
+func (p *parser) parseInputValueDef() *FieldDefinition {
+ var def FieldDefinition
+ def.Position = p.peekPos()
+ def.Description = p.parseDescription()
+ def.Name = p.parseName()
+ p.expect(lexer.Colon)
+ def.Type = p.parseTypeReference()
+ if p.skip(lexer.Equals) {
+ def.DefaultValue = p.parseValueLiteral(true)
+ }
+ def.Directives = p.parseDirectives(true)
+ return &def
+}
+
+func (p *parser) parseInterfaceTypeDefinition(description string) *Definition {
+ p.expectKeyword("interface")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Interface
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseFieldsDefinition()
+ return &def
+}
+
+func (p *parser) parseUnionTypeDefinition(description string) *Definition {
+ p.expectKeyword("union")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Union
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Types = p.parseUnionMemberTypes()
+ return &def
+}
+
+func (p *parser) parseUnionMemberTypes() []string {
+ var types []string
+ if p.skip(lexer.Equals) {
+ // optional leading pipe
+ p.skip(lexer.Pipe)
+
+ types = append(types, p.parseName())
+ for p.skip(lexer.Pipe) && p.err == nil {
+ types = append(types, p.parseName())
+ }
+ }
+ return types
+}
+
+func (p *parser) parseEnumTypeDefinition(description string) *Definition {
+ p.expectKeyword("enum")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Enum
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.EnumValues = p.parseEnumValuesDefinition()
+ return &def
+}
+
+func (p *parser) parseEnumValuesDefinition() EnumValueList {
+ var values EnumValueList
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ values = append(values, p.parseEnumValueDefinition())
+ })
+ return values
+}
+
+func (p *parser) parseEnumValueDefinition() *EnumValueDefinition {
+ return &EnumValueDefinition{
+ Position: p.peekPos(),
+ Description: p.parseDescription(),
+ Name: p.parseName(),
+ Directives: p.parseDirectives(true),
+ }
+}
+
+func (p *parser) parseInputObjectTypeDefinition(description string) *Definition {
+ p.expectKeyword("input")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = InputObject
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseInputFieldsDefinition()
+ return &def
+}
+
+func (p *parser) parseInputFieldsDefinition() FieldList {
+ var values FieldList
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ values = append(values, p.parseInputValueDef())
+ })
+ return values
+}
+
+func (p *parser) parseTypeSystemExtension(doc *SchemaDocument) {
+ p.expectKeyword("extend")
+
+ switch p.peek().Value {
+ case "schema":
+ doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension())
+ case "scalar":
+ doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension())
+ case "type":
+ doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension())
+ case "interface":
+ doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension())
+ case "union":
+ doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension())
+ case "enum":
+ doc.Extensions = append(doc.Extensions, p.parseEnumTypeExtension())
+ case "input":
+ doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension())
+ default:
+ p.unexpectedError()
+ }
+}
+
+func (p *parser) parseSchemaExtension() *SchemaDefinition {
+ p.expectKeyword("schema")
+
+ var def SchemaDefinition
+ def.Position = p.peekPos()
+ def.Directives = p.parseDirectives(true)
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition())
+ })
+ if len(def.Directives) == 0 && len(def.OperationTypes) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseScalarTypeExtension() *Definition {
+ p.expectKeyword("scalar")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Scalar
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ if len(def.Directives) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseObjectTypeExtension() *Definition {
+ p.expectKeyword("type")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Object
+ def.Name = p.parseName()
+ def.Interfaces = p.parseImplementsInterfaces()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseFieldsDefinition()
+ if len(def.Interfaces) == 0 && len(def.Directives) == 0 && len(def.Fields) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseInterfaceTypeExtension() *Definition {
+ p.expectKeyword("interface")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Interface
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseFieldsDefinition()
+ if len(def.Directives) == 0 && len(def.Fields) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseUnionTypeExtension() *Definition {
+ p.expectKeyword("union")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Union
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Types = p.parseUnionMemberTypes()
+
+ if len(def.Directives) == 0 && len(def.Types) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseEnumTypeExtension() *Definition {
+ p.expectKeyword("enum")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Enum
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.EnumValues = p.parseEnumValuesDefinition()
+ if len(def.Directives) == 0 && len(def.EnumValues) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseInputObjectTypeExtension() *Definition {
+ p.expectKeyword("input")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = InputObject
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(false)
+ def.Fields = p.parseInputFieldsDefinition()
+ if len(def.Directives) == 0 && len(def.Fields) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseDirectiveDefinition(description string) *DirectiveDefinition {
+ p.expectKeyword("directive")
+ p.expect(lexer.At)
+
+ var def DirectiveDefinition
+ def.Position = p.peekPos()
+ def.Description = description
+ def.Name = p.parseName()
+ def.Arguments = p.parseArgumentDefs()
+
+ p.expectKeyword("on")
+ def.Locations = p.parseDirectiveLocations()
+ return &def
+}
+
+func (p *parser) parseDirectiveLocations() []DirectiveLocation {
+ p.skip(lexer.Pipe)
+
+ locations := []DirectiveLocation{p.parseDirectiveLocation()}
+
+ for p.skip(lexer.Pipe) && p.err == nil {
+ locations = append(locations, p.parseDirectiveLocation())
+ }
+
+ return locations
+}
+
+func (p *parser) parseDirectiveLocation() DirectiveLocation {
+ name := p.expect(lexer.Name)
+
+ switch name.Value {
+ case `QUERY`:
+ return LocationQuery
+ case `MUTATION`:
+ return LocationMutation
+ case `SUBSCRIPTION`:
+ return LocationSubscription
+ case `FIELD`:
+ return LocationField
+ case `FRAGMENT_DEFINITION`:
+ return LocationFragmentDefinition
+ case `FRAGMENT_SPREAD`:
+ return LocationFragmentSpread
+ case `INLINE_FRAGMENT`:
+ return LocationInlineFragment
+ case `SCHEMA`:
+ return LocationSchema
+ case `SCALAR`:
+ return LocationScalar
+ case `OBJECT`:
+ return LocationObject
+ case `FIELD_DEFINITION`:
+ return LocationFieldDefinition
+ case `ARGUMENT_DEFINITION`:
+ return LocationArgumentDefinition
+ case `INTERFACE`:
+ return LocationInterface
+ case `UNION`:
+ return LocationUnion
+ case `ENUM`:
+ return LocationEnum
+ case `ENUM_VALUE`:
+ return LocationEnumValue
+ case `INPUT_OBJECT`:
+ return LocationInputObject
+ case `INPUT_FIELD_DEFINITION`:
+ return LocationInputFieldDefinition
+ }
+
+ p.unexpectedToken(name)
+ return ""
+}
diff --git a/vendor/github.com/vektah/gqlparser/parser/schema_test.yml b/vendor/github.com/vektah/gqlparser/parser/schema_test.yml
new file mode 100644
index 00000000..c65239a5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/schema_test.yml
@@ -0,0 +1,505 @@
+object types:
+ - name: simple
+ input: |
+ type Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+ - name: with description
+ input: |
+ "Description"
+ type Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Description: "Description"
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+ - name: with block description
+ input: |
+ """
+ Description
+ """
+ # Even with comments between them
+ type Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Description: "Description"
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+ - name: with field arg
+ input: |
+ type Hello {
+ world(flag: Boolean): String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Arguments: [ArgumentDefinition]
+ - <ArgumentDefinition>
+ Name: "flag"
+ Type: Boolean
+ Type: String
+
+ - name: with field arg and default value
+ input: |
+ type Hello {
+ world(flag: Boolean = true): String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Arguments: [ArgumentDefinition]
+ - <ArgumentDefinition>
+ Name: "flag"
+ DefaultValue: true
+ Type: Boolean
+ Type: String
+
+ - name: with field list arg
+ input: |
+ type Hello {
+ world(things: [String]): String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Arguments: [ArgumentDefinition]
+ - <ArgumentDefinition>
+ Name: "things"
+ Type: [String]
+ Type: String
+
+ - name: with two args
+ input: |
+ type Hello {
+ world(argOne: Boolean, argTwo: Int): String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Arguments: [ArgumentDefinition]
+ - <ArgumentDefinition>
+ Name: "argOne"
+ Type: Boolean
+ - <ArgumentDefinition>
+ Name: "argTwo"
+ Type: Int
+ Type: String
+
+type extensions:
+ - name: Object extension
+ input: |
+ extend type Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Extensions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+ - name: without any fields
+ input: "extend type Hello implements Greeting"
+ ast: |
+ <SchemaDocument>
+ Extensions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "Greeting"
+
+ - name: without fields twice
+ input: |
+ extend type Hello implements Greeting
+ extend type Hello implements SecondGreeting
+ ast: |
+ <SchemaDocument>
+ Extensions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "Greeting"
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "SecondGreeting"
+
+ - name: without anything errors
+ input: "extend type Hello"
+ error:
+ message: "Unexpected <EOF>"
+ locations: [{ line: 1, column: 18 }]
+
+ - name: can have descriptions # hmm, this might not be spec compliant...
+ input: |
+ "Description"
+ extend type Hello {
+ world: String
+ }
+ error:
+ message: 'Unexpected String "Description"'
+ locations: [{ line: 1, column: 2 }]
+
+ - name: can not have descriptions on types
+ input: |
+ extend "Description" type Hello {
+ world: String
+ }
+ error:
+ message: Unexpected String "Description"
+ locations: [{ line: 1, column: 9 }]
+
+schema definition:
+ - name: simple
+ input: |
+ schema {
+ query: Query
+ }
+ ast: |
+ <SchemaDocument>
+ Schema: [SchemaDefinition]
+ - <SchemaDefinition>
+ OperationTypes: [OperationTypeDefinition]
+ - <OperationTypeDefinition>
+ Operation: Operation("query")
+ Type: "Query"
+
+schema extensions:
+ - name: simple
+ input: |
+ extend schema {
+ mutation: Mutation
+ }
+ ast: |
+ <SchemaDocument>
+ SchemaExtension: [SchemaDefinition]
+ - <SchemaDefinition>
+ OperationTypes: [OperationTypeDefinition]
+ - <OperationTypeDefinition>
+ Operation: Operation("mutation")
+ Type: "Mutation"
+
+ - name: directive only
+ input: "extend schema @directive"
+ ast: |
+ <SchemaDocument>
+ SchemaExtension: [SchemaDefinition]
+ - <SchemaDefinition>
+ Directives: [Directive]
+ - <Directive>
+ Name: "directive"
+
+ - name: without anything errors
+ input: "extend schema"
+ error:
+ message: "Unexpected <EOF>"
+ locations: [{ line: 1, column: 14}]
+
+type extensions with directives:
+ - name: all can have directives
+ input: |
+ extend scalar Foo @deprecated
+ extend type Foo @deprecated
+ extend interface Foo @deprecated
+ extend union Foo @deprecated
+ extend enum Foo @deprecated
+ extend input Foo @deprecated
+ ast: |
+ <SchemaDocument>
+ Extensions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("SCALAR")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("INTERFACE")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("UNION")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("ENUM")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("INPUT_OBJECT")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+
+
+inheritance:
+ - name: single
+ input: "type Hello implements World { field: String }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "World"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "field"
+ Type: String
+
+ - name: multi
+ input: "type Hello implements Wo & rld { field: String }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "Wo"
+ - "rld"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "field"
+ Type: String
+
+ - name: multi with leading amp
+ input: "type Hello implements & Wo & rld { field: String }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "Wo"
+ - "rld"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "field"
+ Type: String
+
+enums:
+ - name: single value
+ input: "enum Hello { WORLD }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("ENUM")
+ Name: "Hello"
+ EnumValues: [EnumValueDefinition]
+ - <EnumValueDefinition>
+ Name: "WORLD"
+
+ - name: double value
+ input: "enum Hello { WO, RLD }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("ENUM")
+ Name: "Hello"
+ EnumValues: [EnumValueDefinition]
+ - <EnumValueDefinition>
+ Name: "WO"
+ - <EnumValueDefinition>
+ Name: "RLD"
+
+interface:
+ - name: simple
+ input: |
+ interface Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("INTERFACE")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+unions:
+ - name: simple
+ input: "union Hello = World"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("UNION")
+ Name: "Hello"
+ Types: [string]
+ - "World"
+
+ - name: with two types
+ input: "union Hello = Wo | Rld"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("UNION")
+ Name: "Hello"
+ Types: [string]
+ - "Wo"
+ - "Rld"
+
+ - name: with leading pipe
+ input: "union Hello = | Wo | Rld"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("UNION")
+ Name: "Hello"
+ Types: [string]
+ - "Wo"
+ - "Rld"
+
+ - name: cant be empty
+ input: "union Hello = || Wo | Rld"
+ error:
+ message: "Expected Name, found |"
+ locations: [{ line: 1, column: 16 }]
+
+ - name: cant double pipe
+ input: "union Hello = Wo || Rld"
+ error:
+ message: "Expected Name, found |"
+ locations: [{ line: 1, column: 19 }]
+
+ - name: cant have trailing pipe
+ input: "union Hello = | Wo | Rld |"
+ error:
+ message: "Expected Name, found <EOF>"
+ locations: [{ line: 1, column: 27 }]
+
+scalar:
+ - name: simple
+ input: "scalar Hello"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("SCALAR")
+ Name: "Hello"
+
+input object:
+ - name: simple
+ input: |
+ input Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("INPUT_OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+ - name: can not have args
+ input: |
+ input Hello {
+ world(foo: Int): String
+ }
+ error:
+ message: "Expected :, found ("
+ locations: [{ line: 2, column: 8 }]
+
+directives:
+ - name: simple
+ input: directive @foo on FIELD
+ ast: |
+ <SchemaDocument>
+ Directives: [DirectiveDefinition]
+ - <DirectiveDefinition>
+ Name: "foo"
+ Locations: [DirectiveLocation]
+ - DirectiveLocation("FIELD")
+
+ - name: invalid location
+ input: "directive @foo on FIELD | INCORRECT_LOCATION"
+ error:
+ message: 'Unexpected Name "INCORRECT_LOCATION"'
+ locations: [{ line: 1, column: 27 }]
+
diff --git a/vendor/github.com/vektah/gqlparser/readme.md b/vendor/github.com/vektah/gqlparser/readme.md
new file mode 100644
index 00000000..976d202b
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/readme.md
@@ -0,0 +1,17 @@
+gqlparser [![CircleCI](https://badgen.net/circleci/github/vektah/gqlparser/master)](https://circleci.com/gh/vektah/gqlparser) [![Go Report Card](https://goreportcard.com/badge/github.com/vektah/gqlparser)](https://goreportcard.com/report/github.com/vektah/gqlparser) [![Coverage Status](https://badgen.net/coveralls/c/github/vektah/gqlparser)](https://coveralls.io/github/vektah/gqlparser?branch=master)
+===
+
+This is a parser for graphql, written to mirror the graphql-js reference implementation as closely as possible while remaining idiomatic and easy to use.
+
+spec target: June 2018 (Schema definition language, block strings as descriptions, error paths & extension)
+
+This parser is used by [gqlgen](https://github.com/99designs/gqlgen), and it should be reasonably stable.
+
+Guiding principles:
+
+ - maintainability: It should be easy to stay up to date with the spec
+ - well tested: It shouldn't need a graphql server to validate itself. Changes to this repo should be self contained.
+ - server agnostic: It should be usable by any of the graphql server implementations, and any graphql client tooling.
+ - idiomatic & stable api: It should follow go best practices, especially around forwards compatibility.
+ - fast: Where it doesn't impact on the above it should be fast. Avoid unnecessary allocs in hot paths.
+ - close to reference: Where it doesn't impact on the above, it should stay close to the [graphql/graphql-js](https://github.com/graphql/graphql-js) reference implementation.
diff --git a/vendor/github.com/vektah/gqlparser/validator/error.go b/vendor/github.com/vektah/gqlparser/validator/error.go
new file mode 100644
index 00000000..f354dee5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/error.go
@@ -0,0 +1,55 @@
+package validator
+
+import (
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+type ErrorOption func(err *gqlerror.Error)
+
+func Message(msg string, args ...interface{}) ErrorOption {
+ return func(err *gqlerror.Error) {
+ err.Message += fmt.Sprintf(msg, args...)
+ }
+}
+
+func At(position *ast.Position) ErrorOption {
+ return func(err *gqlerror.Error) {
+ if position == nil {
+ return
+ }
+ err.Locations = append(err.Locations, gqlerror.Location{
+ Line: position.Line,
+ Column: position.Column,
+ })
+ if position.Src.Name != "" {
+ err.SetFile(position.Src.Name)
+ }
+ }
+}
+
+func SuggestListQuoted(prefix string, typed string, suggestions []string) ErrorOption {
+ suggested := SuggestionList(typed, suggestions)
+ return func(err *gqlerror.Error) {
+ if len(suggested) > 0 {
+ err.Message += " " + prefix + " " + QuotedOrList(suggested...) + "?"
+ }
+ }
+}
+
+func SuggestListUnquoted(prefix string, typed string, suggestions []string) ErrorOption {
+ suggested := SuggestionList(typed, suggestions)
+ return func(err *gqlerror.Error) {
+ if len(suggested) > 0 {
+ err.Message += " " + prefix + " " + OrList(suggested...) + "?"
+ }
+ }
+}
+
+func Suggestf(suggestion string, args ...interface{}) ErrorOption {
+ return func(err *gqlerror.Error) {
+ err.Message += " Did you mean " + fmt.Sprintf(suggestion, args...) + "?"
+ }
+}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/tests/testdata/LICENSE b/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE
index fce4519e..fce4519e 100644
--- a/vendor/github.com/vektah/gqlgen/neelance/tests/testdata/LICENSE
+++ b/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE
diff --git a/vendor/github.com/vektah/gqlparser/validator/messaging.go b/vendor/github.com/vektah/gqlparser/validator/messaging.go
new file mode 100644
index 00000000..f1ab5873
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/messaging.go
@@ -0,0 +1,39 @@
+package validator
+
+import "bytes"
+
+// Given [ A, B, C ] return '"A", "B", or "C"'.
+func QuotedOrList(items ...string) string {
+ itemsQuoted := make([]string, len(items))
+ for i, item := range items {
+ itemsQuoted[i] = `"` + item + `"`
+ }
+ return OrList(itemsQuoted...)
+}
+
+// Given [ A, B, C ] return 'A, B, or C'.
+func OrList(items ...string) string {
+ var buf bytes.Buffer
+
+ if len(items) > 5 {
+ items = items[:5]
+ }
+ if len(items) == 2 {
+ buf.WriteString(items[0])
+ buf.WriteString(" or ")
+ buf.WriteString(items[1])
+ return buf.String()
+ }
+
+ for i, item := range items {
+ if i != 0 {
+ if i == len(items)-1 {
+ buf.WriteString(", or ")
+ } else {
+ buf.WriteString(", ")
+ }
+ }
+ buf.WriteString(item)
+ }
+ return buf.String()
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/prelude.go b/vendor/github.com/vektah/gqlparser/validator/prelude.go
new file mode 100644
index 00000000..80ce8a21
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/prelude.go
@@ -0,0 +1,5 @@
+package validator
+
+import "github.com/vektah/gqlparser/ast"
+
+var Prelude = &ast.Source{Name: "prelude.graphql", Input: "# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema\n\n# The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.\nscalar Int\n\n# The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).\nscalar Float\n\n# The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.\nscalar String\n\n# The `Boolean` scalar type represents `true` or `false`.\nscalar Boolean\n\n# The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as \"4\") or integer (such as 4) input value will be accepted as an ID.\nscalar ID\n\n# The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument.\ndirective @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n# The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument.\ndirective @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n# The @deprecated directive is used within the type system definition language to indicate deprecated portions of a GraphQL service’s schema, such as deprecated fields on a type or deprecated enum values.\ndirective @deprecated(reason: String = \"No longer supported\") on FIELD_DEFINITION | ENUM_VALUE\n\ntype __Schema {\n types: [__Type!]!\n queryType: __Type!\n mutationType: __Type\n subscriptionType: __Type\n directives: [__Directive!]!\n}\n\ntype __Type {\n kind: __TypeKind!\n name: String\n description: String\n\n # OBJECT and INTERFACE only\n fields(includeDeprecated: Boolean = false): [__Field!]\n\n # OBJECT only\n interfaces: [__Type!]\n\n # INTERFACE and UNION only\n possibleTypes: [__Type!]\n\n # ENUM only\n enumValues(includeDeprecated: Boolean = false): [__EnumValue!]\n\n # INPUT_OBJECT only\n inputFields: [__InputValue!]\n\n # NON_NULL and LIST only\n ofType: __Type\n}\n\ntype __Field {\n name: String!\n description: String\n args: [__InputValue!]!\n type: __Type!\n isDeprecated: Boolean!\n deprecationReason: String\n}\n\ntype __InputValue {\n name: String!\n description: String\n type: __Type!\n defaultValue: String\n}\n\ntype __EnumValue {\n name: String!\n description: String\n isDeprecated: Boolean!\n deprecationReason: String\n}\n\nenum __TypeKind {\n SCALAR\n OBJECT\n INTERFACE\n UNION\n ENUM\n INPUT_OBJECT\n LIST\n NON_NULL\n}\n\ntype __Directive {\n name: String!\n description: String\n locations: [__DirectiveLocation!]!\n args: [__InputValue!]!\n}\n\nenum __DirectiveLocation {\n QUERY\n MUTATION\n SUBSCRIPTION\n FIELD\n FRAGMENT_DEFINITION\n FRAGMENT_SPREAD\n INLINE_FRAGMENT\n SCHEMA\n SCALAR\n OBJECT\n FIELD_DEFINITION\n ARGUMENT_DEFINITION\n INTERFACE\n UNION\n ENUM\n ENUM_VALUE\n INPUT_OBJECT\n INPUT_FIELD_DEFINITION\n}\n"}
diff --git a/vendor/github.com/vektah/gqlparser/validator/prelude.graphql b/vendor/github.com/vektah/gqlparser/validator/prelude.graphql
new file mode 100644
index 00000000..2c7f7c02
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/prelude.graphql
@@ -0,0 +1,119 @@
+# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema
+
+# The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+scalar Int
+
+# The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
+scalar Float
+
+# The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+scalar String
+
+# The `Boolean` scalar type represents `true` or `false`.
+scalar Boolean
+
+# The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as "4") or integer (such as 4) input value will be accepted as an ID.
+scalar ID
+
+# The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument.
+directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
+
+# The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument.
+directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
+
+# The @deprecated directive is used within the type system definition language to indicate deprecated portions of a GraphQL service’s schema, such as deprecated fields on a type or deprecated enum values.
+directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ENUM_VALUE
+
+type __Schema {
+ types: [__Type!]!
+ queryType: __Type!
+ mutationType: __Type
+ subscriptionType: __Type
+ directives: [__Directive!]!
+}
+
+type __Type {
+ kind: __TypeKind!
+ name: String
+ description: String
+
+ # OBJECT and INTERFACE only
+ fields(includeDeprecated: Boolean = false): [__Field!]
+
+ # OBJECT only
+ interfaces: [__Type!]
+
+ # INTERFACE and UNION only
+ possibleTypes: [__Type!]
+
+ # ENUM only
+ enumValues(includeDeprecated: Boolean = false): [__EnumValue!]
+
+ # INPUT_OBJECT only
+ inputFields: [__InputValue!]
+
+ # NON_NULL and LIST only
+ ofType: __Type
+}
+
+type __Field {
+ name: String!
+ description: String
+ args: [__InputValue!]!
+ type: __Type!
+ isDeprecated: Boolean!
+ deprecationReason: String
+}
+
+type __InputValue {
+ name: String!
+ description: String
+ type: __Type!
+ defaultValue: String
+}
+
+type __EnumValue {
+ name: String!
+ description: String
+ isDeprecated: Boolean!
+ deprecationReason: String
+}
+
+enum __TypeKind {
+ SCALAR
+ OBJECT
+ INTERFACE
+ UNION
+ ENUM
+ INPUT_OBJECT
+ LIST
+ NON_NULL
+}
+
+type __Directive {
+ name: String!
+ description: String
+ locations: [__DirectiveLocation!]!
+ args: [__InputValue!]!
+}
+
+enum __DirectiveLocation {
+ QUERY
+ MUTATION
+ SUBSCRIPTION
+ FIELD
+ FRAGMENT_DEFINITION
+ FRAGMENT_SPREAD
+ INLINE_FRAGMENT
+ SCHEMA
+ SCALAR
+ OBJECT
+ FIELD_DEFINITION
+ ARGUMENT_DEFINITION
+ INTERFACE
+ UNION
+ ENUM
+ ENUM_VALUE
+ INPUT_OBJECT
+ INPUT_FIELD_DEFINITION
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
new file mode 100644
index 00000000..69148d52
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
@@ -0,0 +1,86 @@
+package validator
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("FieldsOnCorrectType", func(observers *Events, addError AddErrFunc) {
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if field.ObjectDefinition == nil || field.Definition != nil {
+ return
+ }
+
+ message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name)
+
+ if suggestedTypeNames := getSuggestedTypeNames(walker, field.ObjectDefinition, field.Name); suggestedTypeNames != nil {
+ message += " Did you mean to use an inline fragment on " + QuotedOrList(suggestedTypeNames...) + "?"
+ } else if suggestedFieldNames := getSuggestedFieldNames(field.ObjectDefinition, field.Name); suggestedFieldNames != nil {
+ message += " Did you mean " + QuotedOrList(suggestedFieldNames...) + "?"
+ }
+
+ addError(
+ Message(message),
+ At(field.Position),
+ )
+ })
+ })
+}
+
+// Go through all of the implementations of type, as well as the interfaces
+// that they implement. If any of those types include the provided field,
+// suggest them, sorted by how often the type is referenced, starting
+// with Interfaces.
+func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) []string {
+ if !parent.IsAbstractType() {
+ return nil
+ }
+
+ var suggestedObjectTypes []string
+ var suggestedInterfaceTypes []string
+ interfaceUsageCount := map[string]int{}
+
+ for _, possibleType := range walker.Schema.GetPossibleTypes(parent) {
+ field := possibleType.Fields.ForName(name)
+ if field == nil {
+ continue
+ }
+
+ suggestedObjectTypes = append(suggestedObjectTypes, possibleType.Name)
+
+ for _, possibleInterface := range possibleType.Interfaces {
+ interfaceField := walker.Schema.Types[possibleInterface]
+ if interfaceField != nil && interfaceField.Fields.ForName(name) != nil {
+ if interfaceUsageCount[possibleInterface] == 0 {
+ suggestedInterfaceTypes = append(suggestedInterfaceTypes, possibleInterface)
+ }
+ interfaceUsageCount[possibleInterface]++
+ }
+ }
+ }
+
+ sort.SliceStable(suggestedInterfaceTypes, func(i, j int) bool {
+ return interfaceUsageCount[suggestedInterfaceTypes[i]] > interfaceUsageCount[suggestedInterfaceTypes[j]]
+ })
+
+ return append(suggestedInterfaceTypes, suggestedObjectTypes...)
+}
+
+// For the field name provided, determine if there are any similar field names
+// that may be the result of a typo.
+func getSuggestedFieldNames(parent *ast.Definition, name string) []string {
+ if parent.Kind != ast.Object && parent.Kind != ast.Interface {
+ return nil
+ }
+
+ var possibleFieldNames []string
+ for _, field := range parent.Fields {
+ possibleFieldNames = append(possibleFieldNames, field.Name)
+ }
+
+ return SuggestionList(name, possibleFieldNames)
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
new file mode 100644
index 00000000..a4a48246
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
@@ -0,0 +1,39 @@
+package validator
+
+import (
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("FragmentsOnCompositeTypes", func(observers *Events, addError AddErrFunc) {
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ fragmentType := walker.Schema.Types[inlineFragment.TypeCondition]
+ if fragmentType == nil || fragmentType.IsCompositeType() {
+ return
+ }
+
+ message := fmt.Sprintf(`Fragment cannot condition on non composite type "%s".`, inlineFragment.TypeCondition)
+
+ addError(
+ Message(message),
+ At(inlineFragment.Position),
+ )
+ })
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() {
+ return
+ }
+
+ message := fmt.Sprintf(`Fragment "%s" cannot condition on non composite type "%s".`, fragment.Name, fragment.TypeCondition)
+
+ addError(
+ Message(message),
+ At(fragment.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
new file mode 100644
index 00000000..83b47387
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
@@ -0,0 +1,57 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) {
+ // A GraphQL field is only valid if all supplied arguments are defined by that field.
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if field.Definition == nil {
+ return
+ }
+ for _, arg := range field.Arguments {
+ def := field.Definition.Arguments.ForName(arg.Name)
+ if def != nil {
+ continue
+ }
+
+ var suggestions []string
+ for _, argDef := range field.Definition.Arguments {
+ suggestions = append(suggestions, argDef.Name)
+ }
+
+ addError(
+ Message(`Unknown argument "%s" on field "%s" of type "%s".`, arg.Name, field.Name, field.ObjectDefinition.Name),
+ SuggestListQuoted("Did you mean", arg.Name, suggestions),
+ At(field.Position),
+ )
+ }
+ })
+
+ observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+ if directive.Definition == nil {
+ return
+ }
+ for _, arg := range directive.Arguments {
+ def := directive.Definition.Arguments.ForName(arg.Name)
+ if def != nil {
+ continue
+ }
+
+ var suggestions []string
+ for _, argDef := range directive.Definition.Arguments {
+ suggestions = append(suggestions, argDef.Name)
+ }
+
+ addError(
+ Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name),
+ SuggestListQuoted("Did you mean", arg.Name, suggestions),
+ At(directive.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
new file mode 100644
index 00000000..dc4353ef
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
@@ -0,0 +1,31 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownDirectives", func(observers *Events, addError AddErrFunc) {
+ observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+ if directive.Definition == nil {
+ addError(
+ Message(`Unknown directive "%s".`, directive.Name),
+ At(directive.Position),
+ )
+ return
+ }
+
+ for _, loc := range directive.Definition.Locations {
+ if loc == directive.Location {
+ return
+ }
+ }
+
+ addError(
+ Message(`Directive "%s" may not be used on %s.`, directive.Name, directive.Location),
+ At(directive.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
new file mode 100644
index 00000000..ec91588c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
@@ -0,0 +1,19 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+ if fragmentSpread.Definition == nil {
+ addError(
+ Message(`Unknown fragment "%s".`, fragmentSpread.Name),
+ At(fragmentSpread.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
new file mode 100644
index 00000000..223086b3
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
@@ -0,0 +1,61 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownTypeNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ for _, vdef := range operation.VariableDefinitions {
+ typeName := vdef.Type.Name()
+ def := walker.Schema.Types[typeName]
+ if def != nil {
+ continue
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typeName),
+ At(operation.Position),
+ )
+ }
+ })
+
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ typedName := inlineFragment.TypeCondition
+ if typedName == "" {
+ return
+ }
+
+ def := walker.Schema.Types[typedName]
+ if def != nil {
+ return
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typedName),
+ At(inlineFragment.Position),
+ )
+ })
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ typeName := fragment.TypeCondition
+ def := walker.Schema.Types[typeName]
+ if def != nil {
+ return
+ }
+
+ var possibleTypes []string
+ for _, t := range walker.Schema.Types {
+ possibleTypes = append(possibleTypes, t.Name)
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typeName),
+ SuggestListQuoted("Did you mean", typeName, possibleTypes),
+ At(fragment.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go b/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
new file mode 100644
index 00000000..dd232142
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
@@ -0,0 +1,19 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("LoneAnonymousOperation", func(observers *Events, addError AddErrFunc) { // an unnamed operation may not coexist with any other operation
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			if operation.Name == "" && len(walker.Document.Operations) > 1 { // anonymous operation plus siblings => invalid
+				addError(
+					Message(`This anonymous operation must be the only defined operation.`),
+					At(operation.Position),
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
new file mode 100644
index 00000000..7511529b
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
@@ -0,0 +1,93 @@
+package validator
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("NoFragmentCycles", func(observers *Events, addError AddErrFunc) { // detects fragments that (transitively) spread themselves
+		visitedFrags := make(map[string]bool)
+
+		observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+			var spreadPath []*ast.FragmentSpread
+			spreadPathIndexByName := make(map[string]int)
+
+			var recursive func(fragment *ast.FragmentDefinition)
+			recursive = func(fragment *ast.FragmentDefinition) {
+				if visitedFrags[fragment.Name] {
+					return // already fully explored from an earlier root; any cycles through it were reported then
+				}
+
+				visitedFrags[fragment.Name] = true
+
+				spreadNodes := getFragmentSpreads(fragment.SelectionSet)
+				if len(spreadNodes) == 0 {
+					return
+				}
+				spreadPathIndexByName[fragment.Name] = len(spreadPath)
+
+				for _, spreadNode := range spreadNodes {
+					spreadName := spreadNode.Name
+
+					cycleIndex, ok := spreadPathIndexByName[spreadName]
+
+					spreadPath = append(spreadPath, spreadNode)
+					if !ok {
+						spreadFragment := walker.Document.Fragments.ForName(spreadName)
+						if spreadFragment != nil {
+							recursive(spreadFragment)
+						}
+					} else { // spreadName already sits on the current path: report the cycle
+						cyclePath := spreadPath[cycleIndex : len(spreadPath)-1]
+						var fragmentNames []string
+						for _, fs := range cyclePath {
+							fragmentNames = append(fragmentNames, fs.Name)
+						}
+						var via string
+						if len(fragmentNames) != 0 {
+							via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", "))
+						}
+						addError(
+							Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via),
+							At(spreadNode.Position),
+						)
+					}
+
+					spreadPath = spreadPath[:len(spreadPath)-1]
+				}
+
+				delete(spreadPathIndexByName, fragment.Name)
+			}
+
+			recursive(fragment)
+		})
+	})
+}
+
+func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread { // all spreads reachable from node through fields and inline fragments
+	var spreads []*ast.FragmentSpread
+
+	setsToVisit := []ast.SelectionSet{node} // explicit stack instead of recursion
+
+	for len(setsToVisit) != 0 {
+		set := setsToVisit[len(setsToVisit)-1]
+		setsToVisit = setsToVisit[:len(setsToVisit)-1] // pop
+
+		for _, selection := range set {
+			switch selection := selection.(type) {
+			case *ast.FragmentSpread:
+				spreads = append(spreads, selection)
+			case *ast.Field:
+				setsToVisit = append(setsToVisit, selection.SelectionSet)
+			case *ast.InlineFragment:
+				setsToVisit = append(setsToVisit, selection.SelectionSet)
+			}
+		}
+	}
+
+	return spreads
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
new file mode 100644
index 00000000..505206be
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
@@ -0,0 +1,28 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("NoUndefinedVariables", func(observers *Events, addError AddErrFunc) {
+		observers.OnValue(func(walker *Walker, value *ast.Value) {
+			if walker.CurrentOperation == nil || value.Kind != ast.Variable || value.VariableDefinition != nil {
+				return // only variable usages inside an operation that did not resolve to a definition
+			}
+
+			if walker.CurrentOperation.Name != "" {
+				addError(
+					Message(`Variable "%s" is not defined by operation "%s".`, value, walker.CurrentOperation.Name),
+					At(walker.CurrentOperation.Position),
+				)
+			} else {
+				addError(
+					Message(`Variable "%s" is not defined.`, value),
+					At(value.Position),
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
new file mode 100644
index 00000000..4aa835f5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) {
+
+		inFragmentDefinition := false
+		fragmentNameUsed := make(map[string]bool)
+
+		observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+			if !inFragmentDefinition { // only spreads reached from an operation count as a use
+				fragmentNameUsed[fragmentSpread.Name] = true
+			}
+		})
+
+		observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+			inFragmentDefinition = true // NOTE(review): assumes the walker visits all operations before fragment definitions — confirm
+			if !fragmentNameUsed[fragment.Name] {
+				addError(
+					Message(`Fragment "%s" is never used.`, fragment.Name),
+					At(fragment.Position),
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
new file mode 100644
index 00000000..28cf7736
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) {
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			for _, varDef := range operation.VariableDefinitions {
+				if varDef.Used { // assumes Used is set elsewhere during the walk when a usage resolves — not visible here
+					continue
+				}
+
+				if operation.Name != "" {
+					addError(
+						Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name),
+						At(varDef.Position),
+					)
+				} else {
+					addError(
+						Message(`Variable "$%s" is never used.`, varDef.Variable),
+						At(varDef.Position),
+					)
+				}
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go b/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
new file mode 100644
index 00000000..52eab3a2
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
@@ -0,0 +1,553 @@
+package validator
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+
+	AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) {
+		/**
+		 * Algorithm:
+		 *
+		 * Conflicts occur when two fields exist in a query which will produce the same
+		 * response name, but represent differing values, thus creating a conflict.
+		 * The algorithm below finds all conflicts via making a series of comparisons
+		 * between fields. In order to compare as few fields as possible, this makes
+		 * a series of comparisons "within" sets of fields and "between" sets of fields.
+		 *
+		 * Given any selection set, a collection produces both a set of fields by
+		 * also including all inline fragments, as well as a list of fragments
+		 * referenced by fragment spreads.
+		 *
+		 * A) Each selection set represented in the document first compares "within" its
+		 * collected set of fields, finding any conflicts between every pair of
+		 * overlapping fields.
+		 * Note: This is the *only time* that the fields "within" a set are compared
+		 * to each other. After this only fields "between" sets are compared.
+		 *
+		 * B) Also, if any fragment is referenced in a selection set, then a
+		 * comparison is made "between" the original set of fields and the
+		 * referenced fragment.
+		 *
+		 * C) Also, if multiple fragments are referenced, then comparisons
+		 * are made "between" each referenced fragment.
+		 *
+		 * D) When comparing "between" a set of fields and a referenced fragment, first
+		 * a comparison is made between each field in the original set of fields and
+		 * each field in the referenced set of fields.
+		 *
+		 * E) Also, if any fragment is referenced in the referenced selection set,
+		 * then a comparison is made "between" the original set of fields and the
+		 * referenced fragment (recursively referring to step D).
+		 *
+		 * F) When comparing "between" two fragments, first a comparison is made between
+		 * each field in the first referenced set of fields and each field in the
+		 * second referenced set of fields.
+		 *
+		 * G) Also, any fragments referenced by the first must be compared to the
+		 * second, and any fragments referenced by the second must be compared to the
+		 * first (recursively referring to step F).
+		 *
+		 * H) When comparing two fields, if both have selection sets, then a comparison
+		 * is made "between" both selection sets, first comparing the set of fields in
+		 * the first selection set with the set of fields in the second.
+		 *
+		 * I) Also, if any fragment is referenced in either selection set, then a
+		 * comparison is made "between" the other set of fields and the
+		 * referenced fragment.
+		 *
+		 * J) Also, if two fragments are referenced in both selection sets, then a
+		 * comparison is made "between" the two fragments.
+		 *
+		 */
+
+		m := &overlappingFieldsCanBeMergedManager{
+			comparedFragmentPairs: pairSet{data: make(map[string]map[string]bool)},
+		}
+
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			m.walker = walker
+			conflicts := m.findConflictsWithinSelectionSet(operation.SelectionSet)
+			for _, conflict := range conflicts {
+				conflict.addFieldsConflictMessage(addError)
+			}
+		})
+		observers.OnField(func(walker *Walker, field *ast.Field) {
+			if walker.CurrentOperation == nil {
+				// When checking both Operation and Fragment, errors are duplicated when processing FragmentDefinition referenced from Operation
+				return
+			}
+			m.walker = walker
+			conflicts := m.findConflictsWithinSelectionSet(field.SelectionSet)
+			for _, conflict := range conflicts {
+				conflict.addFieldsConflictMessage(addError)
+			}
+		})
+		observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+			m.walker = walker
+			conflicts := m.findConflictsWithinSelectionSet(inlineFragment.SelectionSet)
+			for _, conflict := range conflicts {
+				conflict.addFieldsConflictMessage(addError)
+			}
+		})
+		observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+			m.walker = walker
+			conflicts := m.findConflictsWithinSelectionSet(fragment.SelectionSet)
+			for _, conflict := range conflicts {
+				conflict.addFieldsConflictMessage(addError)
+			}
+		})
+	})
+}
+
+type pairSet struct { // symmetric set of fragment-name pairs, each tagged with a mutual-exclusivity flag
+	data map[string]map[string]bool
+}
+
+func (pairSet *pairSet) Add(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) {
+	add := func(a *ast.FragmentSpread, b *ast.FragmentSpread) {
+		m := pairSet.data[a.Name]
+		if m == nil {
+			m = make(map[string]bool)
+			pairSet.data[a.Name] = m
+		}
+		m[b.Name] = areMutuallyExclusive
+	}
+	add(a, b)
+	add(b, a) // store both directions so Has is order-independent
+}
+
+func (pairSet *pairSet) Has(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) bool {
+	am, ok := pairSet.data[a.Name]
+	if !ok {
+		return false
+	}
+	result, ok := am[b.Name]
+	if !ok {
+		return false
+	}
+
+	// areMutuallyExclusive being false is a superset of being true,
+	// hence if we want to know if this PairSet "has" these two with no
+	// exclusivity, we have to ensure it was added as such.
+	if !areMutuallyExclusive {
+		return !result
+	}
+
+	return true
+}
+
+type sequentialFieldsMap struct { // insertion-ordered multimap: response name -> fields producing it
+	// We can't use map[string][]*ast.Field. because map is not stable...
+	seq  []string
+	data map[string][]*ast.Field
+}
+
+type fieldIterateEntry struct { // pairs a response name with every field that produces it
+	ResponseName string
+	Fields       []*ast.Field
+}
+
+func (m *sequentialFieldsMap) Push(responseName string, field *ast.Field) {
+	fields, ok := m.data[responseName]
+	if !ok {
+		m.seq = append(m.seq, responseName) // first occurrence: record insertion order
+	}
+	fields = append(fields, field)
+	m.data[responseName] = fields
+}
+
+func (m *sequentialFieldsMap) Get(responseName string) ([]*ast.Field, bool) {
+	fields, ok := m.data[responseName]
+	return fields, ok
+}
+
+func (m *sequentialFieldsMap) Iterator() [][]*ast.Field { // field groups in insertion order
+	fieldsList := make([][]*ast.Field, 0, len(m.seq))
+	for _, responseName := range m.seq {
+		fields := m.data[responseName]
+		fieldsList = append(fieldsList, fields)
+	}
+	return fieldsList
+}
+
+func (m *sequentialFieldsMap) KeyValueIterator() []*fieldIterateEntry { // name/fields entries in insertion order
+	fieldEntriesList := make([]*fieldIterateEntry, 0, len(m.seq))
+	for _, responseName := range m.seq {
+		fields := m.data[responseName]
+		fieldEntriesList = append(fieldEntriesList, &fieldIterateEntry{
+			ResponseName: responseName,
+			Fields:       fields,
+		})
+	}
+	return fieldEntriesList
+}
+
+type conflictMessageContainer struct { // accumulates conflicts found during one comparison pass
+	Conflicts []*ConflictMessage
+}
+
+type ConflictMessage struct {
+	Message      string             // leaf description of why two fields conflict
+	ResponseName string             // response key the conflicting fields share
+	Names        []string
+	SubMessage   []*ConflictMessage // nested conflicts from sub-selections; empty for leaves
+	Position     *ast.Position
+}
+
+func (m *ConflictMessage) String(buf *bytes.Buffer) { // renders the conflict tree into buf
+	if len(m.SubMessage) == 0 {
+		buf.WriteString(m.Message)
+		return
+	}
+
+	for idx, subMessage := range m.SubMessage {
+		buf.WriteString(`subfields "`)
+		buf.WriteString(subMessage.ResponseName)
+		buf.WriteString(`" conflict because `)
+		subMessage.String(buf)
+		if idx != len(m.SubMessage)-1 {
+			buf.WriteString(" and ")
+		}
+	}
+}
+
+func (m *ConflictMessage) addFieldsConflictMessage(addError AddErrFunc) { // emits the fully rendered validation error
+	var buf bytes.Buffer
+	m.String(&buf)
+	addError(
+		Message(`Fields "%s" conflict because %s. Use different aliases on the fields to fetch both if this was intentional.`, m.ResponseName, buf.String()),
+		At(m.Position),
+	)
+}
+
+type overlappingFieldsCanBeMergedManager struct {
+	walker *Walker // walker of the traversal currently being validated
+
+	// per walker
+	comparedFragmentPairs pairSet // fragment pairs already compared, avoiding rework and infinite recursion
+	// cachedFieldsAndFragmentNames interface{}
+
+	// per selectionSet
+	comparedFragments map[string]bool // fragments already compared against the current field set
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflictsWithinSelectionSet(selectionSet ast.SelectionSet) []*ConflictMessage {
+	if len(selectionSet) == 0 {
+		return nil
+	}
+
+	fieldsMap, fragmentSpreads := getFieldsAndFragmentNames(selectionSet)
+
+	var conflicts conflictMessageContainer
+
+	// (A) Find all conflicts "within" the fieldMap of this selection set.
+	// Note: this is the *only place* `collectConflictsWithin` is called.
+	m.collectConflictsWithin(&conflicts, fieldsMap)
+
+	m.comparedFragments = make(map[string]bool)
+	for idx, fragmentSpreadA := range fragmentSpreads {
+		// (B) Then collect conflicts between these fieldMap and those represented by
+		// each spread fragment name found.
+		m.collectConflictsBetweenFieldsAndFragment(&conflicts, false, fieldsMap, fragmentSpreadA)
+
+		for _, fragmentSpreadB := range fragmentSpreads[idx+1:] {
+			// (C) Then compare this fragment with all other fragments found in this
+			// selection set to collect conflicts between fragments spread together.
+			// This compares each item in the list of fragment names to every other
+			// item in that same list (except for itself).
+			m.collectConflictsBetweenFragments(&conflicts, false, fragmentSpreadA, fragmentSpreadB)
+		}
+	}
+
+	return conflicts.Conflicts
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFragment(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fieldsMap *sequentialFieldsMap, fragmentSpread *ast.FragmentSpread) {
+	if m.comparedFragments[fragmentSpread.Name] {
+		return // already compared against the current field set
+	}
+	m.comparedFragments[fragmentSpread.Name] = true
+
+	if fragmentSpread.Definition == nil {
+		return // no definition to compare against
+	}
+
+	fieldsMapB, fragmentSpreads := getFieldsAndFragmentNames(fragmentSpread.Definition.SelectionSet)
+
+	// Do not compare a fragment's fieldMap to itself.
+	if reflect.DeepEqual(fieldsMap, fieldsMapB) {
+		return
+	}
+
+	// (D) First collect any conflicts between the provided collection of fields
+	// and the collection of fields represented by the given fragment.
+	m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMap, fieldsMapB)
+
+	// (E) Then collect any conflicts between the provided collection of fields
+	// and any fragment names found in the given fragment.
+	for _, fragmentSpread := range fragmentSpreads {
+		m.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsMap, fragmentSpread)
+	}
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
+
+	var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread)
+	check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
+
+		if fragmentSpreadA.Name == fragmentSpreadB.Name {
+			return // never compare a fragment to itself
+		}
+
+		if m.comparedFragmentPairs.Has(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive) {
+			return // this pair was already compared at an equal or weaker exclusivity
+		}
+		m.comparedFragmentPairs.Add(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive)
+
+		if fragmentSpreadA.Definition == nil {
+			return
+		}
+		if fragmentSpreadB.Definition == nil {
+			return
+		}
+
+		fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(fragmentSpreadA.Definition.SelectionSet)
+		fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(fragmentSpreadB.Definition.SelectionSet)
+
+		// (F) First, collect all conflicts between these two collections of fields
+		// (not including any nested fragments).
+		m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
+
+		// (G) Then collect conflicts between the first fragment and any nested
+		// fragments spread in the second fragment.
+		for _, fragmentSpread := range fragmentSpreadsB {
+			check(fragmentSpreadA, fragmentSpread)
+		}
+		// (G) Then collect conflicts between the second fragment and any nested
+		// fragments spread in the first fragment.
+		for _, fragmentSpread := range fragmentSpreadsA {
+			check(fragmentSpread, fragmentSpreadB)
+		}
+	}
+
+	check(fragmentSpreadA, fragmentSpreadB)
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflictsBetweenSubSelectionSets(areMutuallyExclusive bool, selectionSetA ast.SelectionSet, selectionSetB ast.SelectionSet) *conflictMessageContainer {
+	var conflicts conflictMessageContainer
+
+	fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(selectionSetA)
+	fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(selectionSetB)
+
+	// (H) First, collect all conflicts between these two collections of fields.
+	m.collectConflictsBetween(&conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
+
+	// (I) Then collect conflicts between the first collection of fields and
+	// those referenced by each fragment name associated with the second.
+	for _, fragmentSpread := range fragmentSpreadsB {
+		m.comparedFragments = make(map[string]bool)
+		m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapA, fragmentSpread)
+	}
+
+	// (I) Then collect conflicts between the second collection of fields and
+	// those referenced by each fragment name associated with the first.
+	for _, fragmentSpread := range fragmentSpreadsA {
+		m.comparedFragments = make(map[string]bool)
+		m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapB, fragmentSpread)
+	}
+
+	// (J) Also collect conflicts between any fragment names by the first and
+	// fragment names by the second. This compares each item in the first set of
+	// names to each item in the second set of names.
+	for _, fragmentSpreadA := range fragmentSpreadsA {
+		for _, fragmentSpreadB := range fragmentSpreadsB {
+			m.collectConflictsBetweenFragments(&conflicts, areMutuallyExclusive, fragmentSpreadA, fragmentSpreadB)
+		}
+	}
+
+	if len(conflicts.Conflicts) == 0 {
+		return nil
+	}
+
+	return &conflicts
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsWithin(conflicts *conflictMessageContainer, fieldsMap *sequentialFieldsMap) {
+	for _, fields := range fieldsMap.Iterator() { // fields sharing a single response name
+		for idx, fieldA := range fields {
+			for _, fieldB := range fields[idx+1:] { // each unordered pair exactly once
+				conflict := m.findConflict(false, fieldA, fieldB)
+				if conflict != nil {
+					conflicts.Conflicts = append(conflicts.Conflicts, conflict)
+				}
+			}
+		}
+	}
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetween(conflicts *conflictMessageContainer, parentFieldsAreMutuallyExclusive bool, fieldsMapA *sequentialFieldsMap, fieldsMapB *sequentialFieldsMap) {
+	for _, fieldsEntryA := range fieldsMapA.KeyValueIterator() {
+		fieldsB, ok := fieldsMapB.Get(fieldsEntryA.ResponseName)
+		if !ok {
+			continue // the response name only appears on one side; no overlap
+		}
+		for _, fieldA := range fieldsEntryA.Fields {
+			for _, fieldB := range fieldsB { // cross-product of same-named fields from A and B
+				conflict := m.findConflict(parentFieldsAreMutuallyExclusive, fieldA, fieldB)
+				if conflict != nil {
+					conflicts.Conflicts = append(conflicts.Conflicts, conflict)
+				}
+			}
+		}
+	}
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflict(parentFieldsAreMutuallyExclusive bool, fieldA *ast.Field, fieldB *ast.Field) *ConflictMessage {
+	if fieldA.Definition == nil || fieldA.ObjectDefinition == nil || fieldB.Definition == nil || fieldB.ObjectDefinition == nil {
+		return nil // nothing to compare without resolved definitions
+	}
+
+	areMutuallyExclusive := parentFieldsAreMutuallyExclusive
+	if !areMutuallyExclusive { // fields selected on two distinct concrete object types can never apply to the same object
+		tmp := fieldA.ObjectDefinition.Name != fieldB.ObjectDefinition.Name
+		tmp = tmp && fieldA.ObjectDefinition.Kind == ast.Object
+		tmp = tmp && fieldB.ObjectDefinition.Kind == ast.Object
+		areMutuallyExclusive = tmp
+	}
+
+	fieldNameA := fieldA.Name
+	if fieldA.Alias != "" {
+		fieldNameA = fieldA.Alias // conflicts are keyed by response name
+	}
+
+	if !areMutuallyExclusive {
+		// Two aliases must refer to the same field.
+		if fieldA.Name != fieldB.Name {
+			return &ConflictMessage{
+				ResponseName: fieldNameA,
+				Message:      fmt.Sprintf(`%s and %s are different fields`, fieldA.Name, fieldB.Name),
+				Position:     fieldB.Position,
+			}
+		}
+
+		// Two field calls must have the same arguments.
+		if !sameArguments(fieldA.Arguments, fieldB.Arguments) {
+			return &ConflictMessage{
+				ResponseName: fieldNameA,
+				Message:      "they have differing arguments",
+				Position:     fieldB.Position,
+			}
+		}
+	}
+
+	if doTypesConflict(m.walker, fieldA.Definition.Type, fieldB.Definition.Type) {
+		return &ConflictMessage{
+			ResponseName: fieldNameA,
+			Message:      fmt.Sprintf(`they return conflicting types %s and %s`, fieldA.Definition.Type.String(), fieldB.Definition.Type.String()),
+			Position:     fieldB.Position,
+		}
+	}
+
+	// Collect and compare sub-fields. Use the same "visited fragment names" list
+	// for both collections so fields in a fragment reference are never
+	// compared to themselves.
+	conflicts := m.findConflictsBetweenSubSelectionSets(areMutuallyExclusive, fieldA.SelectionSet, fieldB.SelectionSet)
+	if conflicts == nil {
+		return nil
+	}
+	return &ConflictMessage{
+		ResponseName: fieldNameA,
+		SubMessage:   conflicts.Conflicts,
+		Position:     fieldB.Position,
+	}
+}
+
+// sameArguments reports whether args1 and args2 hold the same arguments (matched by name, compared by value, order-insensitive).
+func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool {
+	if len(args1) != len(args2) {
+		return false
+	}
+	for _, arg1 := range args1 { // fix: old code paired args positionally, so any two lists with 2+ args (even identical ones) compared unequal
+		found := false
+		for _, arg2 := range args2 {
+			found = found || (arg1.Name == arg2.Name && sameValue(arg1.Value, arg2.Value))
+		}
+		if !found {
+			return false
+		}
+	}
+	return true
+}
+
+func sameValue(value1 *ast.Value, value2 *ast.Value) bool { // NOTE(review): compares Kind and Raw only; child values (lists/objects) are not inspected — confirm adequate
+	if value1.Kind != value2.Kind {
+		return false
+	}
+	if value1.Raw != value2.Raw {
+		return false
+	}
+	return true
+}
+
+func doTypesConflict(walker *Walker, type1 *ast.Type, type2 *ast.Type) bool { // true when the two return types can never be merged
+	if type1.Elem != nil { // type1 is a list type
+		if type2.Elem != nil {
+			return doTypesConflict(walker, type1.Elem, type2.Elem) // compare element types
+		}
+		return true // list vs non-list
+	}
+	if type2.Elem != nil {
+		return true
+	}
+	if type1.NonNull && !type2.NonNull {
+		return true // nullability mismatch
+	}
+	if !type1.NonNull && type2.NonNull {
+		return true
+	}
+
+	t1 := walker.Schema.Types[type1.NamedType]
+	t2 := walker.Schema.Types[type2.NamedType] // NOTE(review): assumes both named types exist in the schema; a missing entry would nil-panic below — confirm guarded upstream
+	if (t1.Kind == ast.Scalar || t1.Kind == ast.Enum) && (t2.Kind == ast.Scalar || t2.Kind == ast.Enum) {
+		return t1.Name != t2.Name // leaf types conflict unless identical
+	}
+
+	return false // composite types may still merge; their sub-selections are compared separately
+}
+
+func getFieldsAndFragmentNames(selectionSet ast.SelectionSet) (*sequentialFieldsMap, []*ast.FragmentSpread) { // splits a selection set into fields-by-response-name and spread references
+	fieldsMap := sequentialFieldsMap{
+		data: make(map[string][]*ast.Field),
+	}
+	var fragmentSpreads []*ast.FragmentSpread
+
+	var walk func(selectionSet ast.SelectionSet)
+	walk = func(selectionSet ast.SelectionSet) {
+		for _, selection := range selectionSet {
+			switch selection := selection.(type) {
+			case *ast.Field:
+				responseName := selection.Name
+				if selection.Alias != "" {
+					responseName = selection.Alias // alias wins as the response key
+				}
+				fieldsMap.Push(responseName, selection)
+
+			case *ast.InlineFragment:
+				walk(selection.SelectionSet) // inline fragments are flattened into the same field set
+
+			case *ast.FragmentSpread:
+				fragmentSpreads = append(fragmentSpreads, selection)
+			}
+		}
+	}
+	walk(selectionSet)
+
+	return &fieldsMap, fragmentSpreads
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go b/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
new file mode 100644
index 00000000..971decbf
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
@@ -0,0 +1,68 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) {
+
+		validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) { // fragmentName is the fragment's type condition
+			if parentDef == nil {
+				return
+			}
+
+			var parentDefs []*ast.Definition
+			switch parentDef.Kind {
+			case ast.Object:
+				parentDefs = []*ast.Definition{parentDef}
+			case ast.Interface, ast.Union:
+				parentDefs = walker.Schema.GetPossibleTypes(parentDef)
+			default:
+				panic("unexpected type")
+			}
+
+			fragmentDefType := walker.Schema.Types[fragmentName]
+			if fragmentDefType == nil {
+				return // unknown type condition; handled by KnownTypeNames
+			}
+			if !fragmentDefType.IsCompositeType() {
+				// checked by FragmentsOnCompositeTypes
+				return
+			}
+			fragmentDefs := walker.Schema.GetPossibleTypes(fragmentDefType)
+
+			for _, fragmentDef := range fragmentDefs {
+				for _, parentDef := range parentDefs {
+					if parentDef.Name == fragmentDef.Name {
+						return // at least one concrete type satisfies both; the spread is possible
+					}
+				}
+			}
+
+			emitError()
+		}
+
+		observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+			validate(walker, inlineFragment.ObjectDefinition, inlineFragment.TypeCondition, func() {
+				addError(
+					Message(`Fragment cannot be spread here as objects of type "%s" can never be of type "%s".`, inlineFragment.ObjectDefinition.Name, inlineFragment.TypeCondition),
+					At(inlineFragment.Position),
+				)
+			})
+		})
+
+		observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+			if fragmentSpread.Definition == nil {
+				return
+			}
+			validate(walker, fragmentSpread.ObjectDefinition, fragmentSpread.Definition.TypeCondition, func() {
+				addError(
+					Message(`Fragment "%s" cannot be spread here as objects of type "%s" can never be of type "%s".`, fragmentSpread.Name, fragmentSpread.ObjectDefinition.Name, fragmentSpread.Definition.TypeCondition),
+					At(fragmentSpread.Position),
+				)
+			})
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go b/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
new file mode 100644
index 00000000..55791a6b
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
@@ -0,0 +1,63 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) {
+
+		observers.OnField(func(walker *Walker, field *ast.Field) {
+			if field.Definition == nil {
+				return
+			}
+
+		argDef:
+			for _, argDef := range field.Definition.Arguments {
+				if !argDef.Type.NonNull {
+					continue // optional argument
+				}
+				if argDef.DefaultValue != nil {
+					continue // required-but-defaulted argument may be omitted
+				}
+				for _, arg := range field.Arguments {
+					if arg.Name == argDef.Name {
+						continue argDef // argument was provided
+					}
+				}
+
+				addError(
+					Message(`Field "%s" argument "%s" of type "%s" is required but not provided.`, field.Name, argDef.Name, argDef.Type.String()),
+					At(field.Position),
+				)
+			}
+		})
+
+		observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+			if directive.Definition == nil {
+				return
+			}
+
+		argDef:
+			for _, argDef := range directive.Definition.Arguments {
+				if !argDef.Type.NonNull {
+					continue
+				}
+				if argDef.DefaultValue != nil {
+					continue
+				}
+				for _, arg := range directive.Arguments {
+					if arg.Name == argDef.Name {
+						continue argDef
+					}
+				}
+
+				addError(
+					Message(`Directive "@%s" argument "%s" of type "%s" is required but not provided.`, directive.Definition.Name, argDef.Name, argDef.Type.String()),
+					At(directive.Position),
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go b/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
new file mode 100644
index 00000000..bb961f44
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
@@ -0,0 +1,36 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("ScalarLeafs", func(observers *Events, addError AddErrFunc) { // leaf types take no sub-selection; composite types require one
+		observers.OnField(func(walker *Walker, field *ast.Field) {
+			if field.Definition == nil {
+				return
+			}
+
+			fieldType := walker.Schema.Types[field.Definition.Type.Name()]
+			if fieldType == nil {
+				return // unknown type; handled by KnownTypeNames
+			}
+
+			if fieldType.IsLeafType() && len(field.SelectionSet) > 0 {
+				addError(
+					Message(`Field "%s" must not have a selection since type "%s" has no subfields.`, field.Name, fieldType.Name),
+					At(field.Position),
+				)
+			}
+
+			if !fieldType.IsLeafType() && len(field.SelectionSet) == 0 {
+				addError(
+					Message(`Field "%s" of type "%s" must have a selection of subfields.`, field.Name, field.Definition.Type.String()),
+					Suggestf(`"%s { ... }"`, field.Name),
+					At(field.Position),
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go b/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
new file mode 100644
index 00000000..53003c11
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("SingleFieldSubscriptions", func(observers *Events, addError AddErrFunc) {
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			if operation.Operation != ast.Subscription {
+				return // the single-root-field constraint only applies to subscriptions
+			}
+
+			if len(operation.SelectionSet) > 1 {
+				name := "Anonymous Subscription"
+				if operation.Name != "" {
+					name = `Subscription ` + strconv.Quote(operation.Name)
+				}
+
+				addError(
+					Message(`%s must select only one top level field.`, name),
+					At(operation.SelectionSet[1].GetPosition()), // point at the first extra selection
+				)
+			}
+		})
+	})
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
new file mode 100644
index 00000000..0ddcde72
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
@@ -0,0 +1,33 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+	AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) { // arguments on a field or directive must have distinct names
+		observers.OnField(func(walker *Walker, field *ast.Field) {
+			checkUniqueArgs(field.Arguments, addError)
+		})
+
+		observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+			checkUniqueArgs(directive.Arguments, addError)
+		})
+	})
+}
+
+func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) { // reports an error at every repeated argument name
+	knownArgNames := map[string]bool{}
+
+	for _, arg := range args {
+		if knownArgNames[arg.Name] {
+			addError(
+				Message(`There can be only one argument named "%s".`, arg.Name),
+				At(arg.Position),
+			)
+		}
+
+		knownArgNames[arg.Name] = true
+	}
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
new file mode 100644
index 00000000..077c4687
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
@@ -0,0 +1,24 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) {
+ observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) {
+ seen := map[string]bool{}
+
+ for _, dir := range directives {
+ if seen[dir.Name] {
+ addError(
+ Message(`The directive "%s" can only be used once at this location.`, dir.Name),
+ At(dir.Position),
+ )
+ }
+ seen[dir.Name] = true
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
new file mode 100644
index 00000000..46a8b7c7
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
@@ -0,0 +1,22 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) {
+ seenFragments := map[string]bool{}
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ if seenFragments[fragment.Name] {
+ addError(
+ Message(`There can be only one fragment named "%s".`, fragment.Name),
+ At(fragment.Position),
+ )
+ }
+ seenFragments[fragment.Name] = true
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
new file mode 100644
index 00000000..f254d588
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
@@ -0,0 +1,27 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnValue(func(walker *Walker, value *ast.Value) {
+ if value.Kind != ast.ObjectValue {
+ return
+ }
+
+ seen := map[string]bool{}
+ for _, field := range value.Children {
+ if seen[field.Name] {
+ addError(
+ Message(`There can be only one input field named "%s".`, field.Name),
+ At(field.Position),
+ )
+ }
+ seen[field.Name] = true
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
new file mode 100644
index 00000000..c1ab56be
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
@@ -0,0 +1,22 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) {
+ seen := map[string]bool{}
+
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ if seen[operation.Name] {
+ addError(
+ Message(`There can be only one operation named "%s".`, operation.Name),
+ At(operation.Position),
+ )
+ }
+ seen[operation.Name] = true
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
new file mode 100644
index 00000000..70590a88
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
@@ -0,0 +1,23 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ seen := map[string]bool{}
+ for _, def := range operation.VariableDefinitions {
+ if seen[def.Variable] {
+ addError(
+ Message(`There can be only one variable named "%s".`, def.Variable),
+ At(def.Position),
+ )
+ }
+ seen[def.Variable] = true
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
new file mode 100644
index 00000000..d64cc666
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
@@ -0,0 +1,130 @@
+package validator
+
+import (
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) {
+ observers.OnValue(func(walker *Walker, value *ast.Value) {
+ if value.Definition == nil || value.ExpectedType == nil {
+ return
+ }
+
+ if value.Definition.Kind == ast.Scalar {
+ // Skip custom validating scalars
+ if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") {
+ return
+ }
+ }
+
+ var possibleEnums []string
+ if value.Definition.Kind == ast.Enum {
+ for _, val := range value.Definition.EnumValues {
+ possibleEnums = append(possibleEnums, val.Name)
+ }
+ }
+
+ rawVal, err := value.Value(nil)
+ if err != nil {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ switch value.Kind {
+ case ast.NullValue:
+ if value.ExpectedType.NonNull {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.ListValue:
+ if value.ExpectedType.Elem == nil {
+ unexpectedTypeMessage(addError, value)
+ return
+ }
+
+ case ast.IntValue:
+ if !value.Definition.OneOf("Int", "Float", "ID") {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.FloatValue:
+ if !value.Definition.OneOf("Float") {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.StringValue, ast.BlockValue:
+ if value.Definition.Kind == ast.Enum {
+ rawValStr := fmt.Sprint(rawVal)
+ addError(
+ Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
+ SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
+ At(value.Position),
+ )
+ } else if !value.Definition.OneOf("String", "ID") {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.EnumValue:
+ if value.Definition.Kind != ast.Enum || value.Definition.EnumValues.ForName(value.Raw) == nil {
+ rawValStr := fmt.Sprint(rawVal)
+ addError(
+ Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
+ SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
+ At(value.Position),
+ )
+ }
+
+ case ast.BooleanValue:
+ if !value.Definition.OneOf("Boolean") {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.ObjectValue:
+
+ for _, field := range value.Definition.Fields {
+ if field.Type.NonNull {
+ fieldValue := value.Children.ForName(field.Name)
+ if fieldValue == nil && field.DefaultValue == nil {
+ addError(
+ Message("Field %s.%s of required type %s was not provided.", value.Definition.Name, field.Name, field.Type.String()),
+ At(value.Position),
+ )
+ continue
+ }
+ }
+ }
+
+ for _, fieldValue := range value.Children {
+ if value.Definition.Fields.ForName(fieldValue.Name) == nil {
+ var suggestions []string
+ for _, fieldValue := range value.Definition.Fields {
+ suggestions = append(suggestions, fieldValue.Name)
+ }
+
+ addError(
+ Message(`Field "%s" is not defined by type %s.`, fieldValue.Name, value.Definition.Name),
+ SuggestListUnquoted("Did you mean", fieldValue.Name, suggestions),
+ At(fieldValue.Position),
+ )
+ }
+ }
+
+ case ast.Variable:
+ return
+
+ default:
+ panic(fmt.Errorf("unhandled %T", value))
+ }
+ })
+ })
+}
+
+func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) {
+ addError(
+ Message("Expected type %s, found %s.", v.ExpectedType.String(), v.String()),
+ At(v.Position),
+ )
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
new file mode 100644
index 00000000..9d58ae1c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
@@ -0,0 +1,28 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ for _, def := range operation.VariableDefinitions {
+ if def.Definition == nil {
+ continue
+ }
+ if !def.Definition.IsInputType() {
+ addError(
+ Message(
+ `Variable "$%s" cannot be non-input type "%s".`,
+ def.Variable,
+ def.Type.String(),
+ ),
+ At(def.Position),
+ )
+ }
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
new file mode 100644
index 00000000..e6d97c9f
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
@@ -0,0 +1,36 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("VariablesInAllowedPosition", func(observers *Events, addError AddErrFunc) {
+ observers.OnValue(func(walker *Walker, value *ast.Value) {
+ if value.Kind != ast.Variable || value.ExpectedType == nil || value.VariableDefinition == nil || walker.CurrentOperation == nil {
+ return
+ }
+
+ // todo: move me into walk
+ // If there is a default non nullable types can be null
+ if value.VariableDefinition.DefaultValue != nil && value.VariableDefinition.DefaultValue.Kind != ast.NullValue {
+ if value.ExpectedType.NonNull {
+ value.ExpectedType.NonNull = false
+ }
+ }
+
+ if !value.VariableDefinition.Type.IsCompatible(value.ExpectedType) {
+ addError(
+ Message(
+ `Variable "%s" of type "%s" used in position expecting type "%s".`,
+ value,
+ value.VariableDefinition.Type.String(),
+ value.ExpectedType.String(),
+ ),
+ At(value.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/schema.go b/vendor/github.com/vektah/gqlparser/validator/schema.go
new file mode 100644
index 00000000..8fa18d7e
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/schema.go
@@ -0,0 +1,212 @@
+//go:generate go run ./inliner/inliner.go
+
+package validator
+
+import (
+ "strconv"
+
+ . "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/parser"
+)
+
+func LoadSchema(inputs ...*Source) (*Schema, *gqlerror.Error) {
+ ast := &SchemaDocument{}
+ for _, input := range inputs {
+ inputAst, err := parser.ParseSchema(input)
+ if err != nil {
+ return nil, err
+ }
+
+ ast.Merge(inputAst)
+ }
+
+ schema := Schema{
+ Types: map[string]*Definition{},
+ Directives: map[string]*DirectiveDefinition{},
+ PossibleTypes: map[string][]*Definition{},
+ }
+
+ for i, def := range ast.Definitions {
+ if schema.Types[def.Name] != nil {
+ return nil, gqlerror.ErrorPosf(def.Position, "Cannot redeclare type %s.", def.Name)
+ }
+ schema.Types[def.Name] = ast.Definitions[i]
+
+ if def.Kind != Interface {
+ for _, intf := range def.Interfaces {
+ schema.AddPossibleType(intf, ast.Definitions[i])
+ }
+ schema.AddPossibleType(def.Name, ast.Definitions[i])
+ }
+ }
+
+ for _, ext := range ast.Extensions {
+ def := schema.Types[ext.Name]
+ if def == nil {
+ return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because it does not exist.", ext.Name)
+ }
+
+ if def.Kind != ext.Kind {
+ return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because the base type is a %s, not %s.", ext.Name, def.Kind, ext.Kind)
+ }
+
+ def.Directives = append(def.Directives, ext.Directives...)
+ def.Interfaces = append(def.Interfaces, ext.Interfaces...)
+ def.Fields = append(def.Fields, ext.Fields...)
+ def.Types = append(def.Types, ext.Types...)
+ def.EnumValues = append(def.EnumValues, ext.EnumValues...)
+ }
+
+ for i, dir := range ast.Directives {
+ if schema.Directives[dir.Name] != nil {
+ return nil, gqlerror.ErrorPosf(dir.Position, "Cannot redeclare directive %s.", dir.Name)
+ }
+ schema.Directives[dir.Name] = ast.Directives[i]
+ }
+
+ if len(ast.Schema) > 1 {
+ return nil, gqlerror.ErrorPosf(ast.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.")
+ }
+
+ if len(ast.Schema) == 1 {
+ for _, entrypoint := range ast.Schema[0].OperationTypes {
+ def := schema.Types[entrypoint.Type]
+ if def == nil {
+ return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
+ }
+ switch entrypoint.Operation {
+ case Query:
+ schema.Query = def
+ case Mutation:
+ schema.Mutation = def
+ case Subscription:
+ schema.Subscription = def
+ }
+ }
+ }
+
+ for _, ext := range ast.SchemaExtension {
+ for _, entrypoint := range ext.OperationTypes {
+ def := schema.Types[entrypoint.Type]
+ if def == nil {
+ return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
+ }
+ switch entrypoint.Operation {
+ case Query:
+ schema.Query = def
+ case Mutation:
+ schema.Mutation = def
+ case Subscription:
+ schema.Subscription = def
+ }
+ }
+ }
+
+ for _, typ := range schema.Types {
+ err := validateDefinition(&schema, typ)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ for _, dir := range schema.Directives {
+ err := validateDirective(&schema, dir)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ if schema.Query == nil && schema.Types["Query"] != nil {
+ schema.Query = schema.Types["Query"]
+ }
+
+ if schema.Mutation == nil && schema.Types["Mutation"] != nil {
+ schema.Mutation = schema.Types["Mutation"]
+ }
+
+ if schema.Subscription == nil && schema.Types["Subscription"] != nil {
+ schema.Subscription = schema.Types["Subscription"]
+ }
+
+ if schema.Query != nil {
+ schema.Query.Fields = append(
+ schema.Query.Fields,
+ &FieldDefinition{
+ Name: "__schema",
+ Type: NonNullNamedType("__Schema", nil),
+ },
+ &FieldDefinition{
+ Name: "__type",
+ Type: NonNullNamedType("__Type", nil),
+ Arguments: ArgumentDefinitionList{
+ {Name: "name", Type: NamedType("String", nil)},
+ },
+ },
+ )
+ }
+
+ return &schema, nil
+}
+
+func validateDirective(schema *Schema, def *DirectiveDefinition) *gqlerror.Error {
+ return validateArgs(schema, def.Arguments, def)
+}
+
+func validateDefinition(schema *Schema, def *Definition) *gqlerror.Error {
+ for _, field := range def.Fields {
+ if err := validateTypeRef(schema, field.Type); err != nil {
+ return err
+ }
+ if err := validateArgs(schema, field.Arguments, nil); err != nil {
+ return err
+ }
+ if err := validateDirectives(schema, field.Directives, nil); err != nil {
+ return err
+ }
+ }
+
+ for _, intf := range def.Interfaces {
+ intDef := schema.Types[intf]
+ if intDef == nil {
+ return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intf))
+ }
+ if intDef.Kind != Interface {
+ return gqlerror.ErrorPosf(def.Position, "%s is a non interface type %s.", strconv.Quote(intf), intDef.Kind)
+ }
+ }
+
+ return validateDirectives(schema, def.Directives, nil)
+}
+
+func validateTypeRef(schema *Schema, typ *Type) *gqlerror.Error {
+ if schema.Types[typ.Name()] == nil {
+ return gqlerror.ErrorPosf(typ.Position, "Undefined type %s.", typ.Name())
+ }
+ return nil
+}
+
+func validateArgs(schema *Schema, args ArgumentDefinitionList, currentDirective *DirectiveDefinition) *gqlerror.Error {
+ for _, arg := range args {
+ if err := validateTypeRef(schema, arg.Type); err != nil {
+ return err
+ }
+ if err := validateDirectives(schema, arg.Directives, currentDirective); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func validateDirectives(schema *Schema, dirs DirectiveList, currentDirective *DirectiveDefinition) *gqlerror.Error {
+ for _, dir := range dirs {
+ if currentDirective != nil && dir.Name == currentDirective.Name {
+ return gqlerror.ErrorPosf(dir.Position, "Directive %s cannot refer to itself.", currentDirective.Name)
+ }
+ if schema.Directives[dir.Name] == nil {
+ return gqlerror.ErrorPosf(dir.Position, "Undefined directive %s.", dir.Name)
+ }
+ dir.Definition = schema.Directives[dir.Name]
+ }
+ return nil
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/schema_test.yml b/vendor/github.com/vektah/gqlparser/validator/schema_test.yml
new file mode 100644
index 00000000..59e7145c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/schema_test.yml
@@ -0,0 +1,152 @@
+types:
+ - name: cannot be redeclared
+ input: |
+ type A {
+ name: String
+ }
+ type A {
+ name: String
+ }
+ error:
+ message: "Cannot redeclare type A."
+ locations: [{line: 4, column: 6}]
+
+interfaces:
+ - name: must exist
+ input: |
+ type Thing implements Object {
+ id: ID!
+ }
+
+ type Query {
+ Things: [Thing!]!
+ }
+ error:
+ message: 'Undefined type "Object".'
+ locations: [{line: 1, column: 6}]
+
+ - name: must be an interface
+ input: |
+ type Thing implements Object {
+ id: ID!
+ }
+
+ type Query {
+ Things: [Thing!]!
+ }
+
+ type Object {
+ name: String
+ }
+ error:
+ message: '"Object" is a non interface type OBJECT.'
+ locations: [{line: 1, column: 6}]
+
+type extensions:
+ - name: cannot extend non-existent types
+ input: |
+ extend type A {
+ name: String
+ }
+ error:
+ message: "Cannot extend type A because it does not exist."
+ locations: [{line: 1, column: 13}]
+
+ - name: cannot extend existent types with an incorrect kind
+ input: |
+ scalar A
+ extend type A {
+ name: String
+ }
+ error:
+ message: "Cannot extend type A because the base type is a SCALAR, not OBJECT."
+ locations: [{line: 2, column: 13}]
+
+directives:
+ - name: cannot redeclare directives
+ input: |
+ directive @A on FIELD_DEFINITION
+ directive @A on FIELD_DEFINITION
+ error:
+ message: "Cannot redeclare directive A."
+ locations: [{line: 2, column: 12}]
+
+ - name: must be declared
+ input: |
+ type User {
+ name: String @foo
+ }
+ error:
+ message: "Undefined directive foo."
+ locations: [{line: 2, column: 17}]
+
+ - name: cannot be self-referential
+ input: |
+ directive @A(foo: Int! @A) on FIELD_DEFINITION
+ error:
+ message: "Directive A cannot refer to itself."
+ locations: [{line: 1, column: 25}]
+
+entry points:
+ - name: multiple schema entry points
+ input: |
+ schema {
+ query: Query
+ }
+ schema {
+ query: Query
+ }
+ scalar Query
+ error:
+ message: "Cannot have multiple schema entry points, consider schema extensions instead."
+ locations: [{line: 4, column: 8}]
+
+ - name: Undefined schema entrypoint
+ input: |
+ schema {
+ query: Query
+ }
+ error:
+ message: "Schema root query refers to a type Query that does not exist."
+ locations: [{line: 2, column: 3}]
+
+entry point extensions:
+ - name: Undefined schema entrypoint
+ input: |
+ schema {
+ query: Query
+ }
+ scalar Query
+ extend schema {
+ mutation: Mutation
+ }
+ error:
+ message: "Schema root mutation refers to a type Mutation that does not exist."
+ locations: [{line: 6, column: 3}]
+
+type references:
+ - name: Field types
+ input: |
+ type User {
+ posts: Post
+ }
+ error:
+ message: "Undefined type Post."
+ locations: [{line: 2, column: 10}]
+
+ - name: Arg types
+ input: |
+ type User {
+ posts(foo: FooBar): String
+ }
+ error:
+ message: "Undefined type FooBar."
+ locations: [{line: 2, column: 14}]
+
+ - name: Directive arg types
+ input: |
+ directive @Foo(foo: FooBar) on FIELD_DEFINITION
+
+ error:
+ message: "Undefined type FooBar."
+ locations: [{line: 1, column: 21}]
diff --git a/vendor/github.com/vektah/gqlparser/validator/suggestionList.go b/vendor/github.com/vektah/gqlparser/validator/suggestionList.go
new file mode 100644
index 00000000..f58d0fc2
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/suggestionList.go
@@ -0,0 +1,69 @@
+package validator
+
+import (
+ "sort"
+ "strings"
+
+ "github.com/agnivade/levenshtein"
+)
+
+// SuggestionList, given an invalid input string and a list of valid options,
+// returns the valid options sorted by their similarity with the input.
+func SuggestionList(input string, options []string) []string {
+ var results []string
+ optionsByDistance := map[string]int{}
+
+ for _, option := range options {
+ distance := lexicalDistance(input, option)
+ threshold := calcThreshold(input, option)
+ if distance <= threshold {
+ results = append(results, option)
+ optionsByDistance[option] = distance
+ }
+ }
+
+ sort.Slice(results, func(i, j int) bool {
+ return optionsByDistance[results[i]] < optionsByDistance[results[j]]
+ })
+ return results
+}
+
+func calcThreshold(a, b string) (threshold int) {
+ if len(a) >= len(b) {
+ threshold = len(a) / 2
+ } else {
+ threshold = len(b) / 2
+ }
+ if threshold < 1 {
+ threshold = 1
+ }
+ return
+}
+
+// Computes the lexical distance between strings A and B.
+//
+// The "distance" between two strings is given by counting the minimum number
+// of edits needed to transform string A into string B. An edit can be an
+// insertion, deletion, or substitution of a single character. (This is plain
+// Levenshtein distance; adjacent-character swaps count as two edits.)
+//
+// Includes a custom alteration from Damerau-Levenshtein to treat case changes
+// as a single edit which helps identify mis-cased values with an edit distance
+// of 1.
+//
+// This distance is useful for detecting typos in input and for sorting options by similarity.
+func lexicalDistance(a, b string) int {
+ if a == b {
+ return 0
+ }
+
+ a = strings.ToLower(a)
+ b = strings.ToLower(b)
+
+ // Any case change counts as a single edit
+ if a == b {
+ return 1
+ }
+
+ return levenshtein.ComputeDistance(a, b)
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/validator.go b/vendor/github.com/vektah/gqlparser/validator/validator.go
new file mode 100644
index 00000000..bbacec6f
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/validator.go
@@ -0,0 +1,44 @@
+package validator
+
+import (
+ . "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+type AddErrFunc func(options ...ErrorOption)
+
+type ruleFunc func(observers *Events, addError AddErrFunc)
+
+type rule struct {
+ name string
+ rule ruleFunc
+}
+
+var rules []rule
+
+// AddRule registers a named rule in the global rule set.
+// f is called once each time `Validate` is executed.
+func AddRule(name string, f ruleFunc) {
+ rules = append(rules, rule{name: name, rule: f})
+}
+
+func Validate(schema *Schema, doc *QueryDocument) gqlerror.List {
+ var errs gqlerror.List
+
+ observers := &Events{}
+ for i := range rules {
+ rule := rules[i]
+ rule.rule(observers, func(options ...ErrorOption) {
+ err := &gqlerror.Error{
+ Rule: rule.name,
+ }
+ for _, o := range options {
+ o(err)
+ }
+ errs = append(errs, err)
+ })
+ }
+
+ Walk(schema, doc, observers)
+ return errs
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/vars.go b/vendor/github.com/vektah/gqlparser/validator/vars.go
new file mode 100644
index 00000000..0743f5cc
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/vars.go
@@ -0,0 +1,195 @@
+package validator
+
+import (
+ "reflect"
+
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+var UnexpectedType = fmt.Errorf("Unexpected Type")
+
+// VariableValues coerces and validates variable values
+func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, *gqlerror.Error) {
+ coercedVars := map[string]interface{}{}
+
+ validator := varValidator{
+ path: []interface{}{"variable"},
+ schema: schema,
+ }
+
+ for _, v := range op.VariableDefinitions {
+ validator.path = append(validator.path, v.Variable)
+
+ if !v.Definition.IsInputType() {
+ return nil, gqlerror.ErrorPathf(validator.path, "must an input type")
+ }
+
+ val, hasValue := variables[v.Variable]
+ if !hasValue {
+ if v.DefaultValue != nil {
+ var err error
+ val, err = v.DefaultValue.Value(nil)
+ if err != nil {
+ return nil, gqlerror.WrapPath(validator.path, err)
+ }
+ hasValue = true
+ } else if v.Type.NonNull {
+ return nil, gqlerror.ErrorPathf(validator.path, "must be defined")
+ }
+ }
+
+ if hasValue {
+ if val == nil {
+ if v.Type.NonNull {
+ return nil, gqlerror.ErrorPathf(validator.path, "cannot be null")
+ }
+ coercedVars[v.Variable] = nil
+ } else {
+ rv := reflect.ValueOf(val)
+ if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface {
+ rv = rv.Elem()
+ }
+
+ if err := validator.validateVarType(v.Type, rv); err != nil {
+ return nil, err
+ }
+
+ coercedVars[v.Variable] = val
+ }
+ }
+
+ validator.path = validator.path[0 : len(validator.path)-1]
+ }
+
+ return coercedVars, nil
+}
+
+type varValidator struct {
+ path []interface{}
+ schema *ast.Schema
+}
+
+func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) *gqlerror.Error {
+ if typ.Elem != nil {
+ if val.Kind() != reflect.Slice {
+ return gqlerror.ErrorPathf(v.path, "must be an array")
+ }
+
+ for i := 0; i < val.Len(); i++ {
+ v.path = append(v.path, i)
+ field := val.Index(i)
+
+ if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
+ if typ.Elem.NonNull && field.IsNil() {
+ return gqlerror.ErrorPathf(v.path, "cannot be null")
+ }
+ field = field.Elem()
+ }
+
+ if err := v.validateVarType(typ.Elem, field); err != nil {
+ return err
+ }
+
+ v.path = v.path[0 : len(v.path)-1]
+ }
+
+ return nil
+ }
+
+ def := v.schema.Types[typ.NamedType]
+ if def == nil {
+ panic(fmt.Errorf("missing def for %s", typ.NamedType))
+ }
+
+ switch def.Kind {
+ case ast.Enum:
+ kind := val.Type().Kind()
+ if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
+ return nil
+ }
+ return gqlerror.ErrorPathf(v.path, "enums must be ints or strings")
+ case ast.Scalar:
+ kind := val.Type().Kind()
+ switch typ.NamedType {
+ case "Int":
+ if kind == reflect.String || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 {
+ return nil
+ }
+ case "Float":
+ if kind == reflect.String || kind == reflect.Float32 || kind == reflect.Float64 || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 {
+ return nil
+ }
+ case "String":
+ if kind == reflect.String {
+ return nil
+ }
+
+ case "Boolean":
+ if kind == reflect.Bool {
+ return nil
+ }
+
+ case "ID":
+ if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
+ return nil
+ }
+ default:
+ // assume custom scalars are ok
+ return nil
+ }
+ return gqlerror.ErrorPathf(v.path, "cannot use %s as %s", kind.String(), typ.NamedType)
+ case ast.InputObject:
+ if val.Kind() != reflect.Map {
+ return gqlerror.ErrorPathf(v.path, "must be a %s", def.Name)
+ }
+
+ // check for unknown fields
+ for _, name := range val.MapKeys() {
+ val.MapIndex(name)
+ fieldDef := def.Fields.ForName(name.String())
+ v.path = append(v.path, name)
+
+ if fieldDef == nil {
+ return gqlerror.ErrorPathf(v.path, "unknown field")
+ }
+ v.path = v.path[0 : len(v.path)-1]
+ }
+
+ for _, fieldDef := range def.Fields {
+ v.path = append(v.path, fieldDef.Name)
+
+ field := val.MapIndex(reflect.ValueOf(fieldDef.Name))
+ if !field.IsValid() {
+ if fieldDef.Type.NonNull {
+ return gqlerror.ErrorPathf(v.path, "must be defined")
+ }
+ continue
+ }
+
+ if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
+ if fieldDef.Type.NonNull && field.IsNil() {
+ return gqlerror.ErrorPathf(v.path, "cannot be null")
+ }
+ // Allow a null value for a nullable object field and skip it.
+ if !fieldDef.Type.NonNull && field.IsNil() {
+ continue
+ }
+ field = field.Elem()
+ }
+
+ err := v.validateVarType(fieldDef.Type, field)
+ if err != nil {
+ return err
+ }
+
+ v.path = v.path[0 : len(v.path)-1]
+ }
+ default:
+ panic(fmt.Errorf("unsupported type %s", def.Kind))
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/walk.go b/vendor/github.com/vektah/gqlparser/validator/walk.go
new file mode 100644
index 00000000..751ba1f1
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/walk.go
@@ -0,0 +1,286 @@
+package validator
+
+import (
+ "context"
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+// Events collects observer callbacks that a Walker invokes while traversing
+// a GraphQL query document. Register handlers with the On* methods below;
+// each node kind may have any number of observers, invoked in registration
+// order after the walker has descended into the node's children.
+type Events struct {
+ operationVisitor []func(walker *Walker, operation *ast.OperationDefinition)
+ field []func(walker *Walker, field *ast.Field)
+ fragment []func(walker *Walker, fragment *ast.FragmentDefinition)
+ inlineFragment []func(walker *Walker, inlineFragment *ast.InlineFragment)
+ fragmentSpread []func(walker *Walker, fragmentSpread *ast.FragmentSpread)
+ directive []func(walker *Walker, directive *ast.Directive)
+ directiveList []func(walker *Walker, directives []*ast.Directive)
+ value []func(walker *Walker, value *ast.Value)
+}
+
+// OnOperation registers f to be called for every operation definition.
+func (o *Events) OnOperation(f func(walker *Walker, operation *ast.OperationDefinition)) {
+ o.operationVisitor = append(o.operationVisitor, f)
+}
+
+// OnField registers f to be called for every field selection.
+func (o *Events) OnField(f func(walker *Walker, field *ast.Field)) {
+ o.field = append(o.field, f)
+}
+
+// OnFragment registers f to be called for every fragment definition.
+func (o *Events) OnFragment(f func(walker *Walker, fragment *ast.FragmentDefinition)) {
+ o.fragment = append(o.fragment, f)
+}
+
+// OnInlineFragment registers f to be called for every inline fragment.
+func (o *Events) OnInlineFragment(f func(walker *Walker, inlineFragment *ast.InlineFragment)) {
+ o.inlineFragment = append(o.inlineFragment, f)
+}
+
+// OnFragmentSpread registers f to be called for every fragment spread.
+func (o *Events) OnFragmentSpread(f func(walker *Walker, fragmentSpread *ast.FragmentSpread)) {
+ o.fragmentSpread = append(o.fragmentSpread, f)
+}
+
+// OnDirective registers f to be called for each individual directive.
+func (o *Events) OnDirective(f func(walker *Walker, directive *ast.Directive)) {
+ o.directive = append(o.directive, f)
+}
+
+// OnDirectiveList registers f to be called once per directive list
+// (possibly empty), after each directive in it has been visited.
+func (o *Events) OnDirectiveList(f func(walker *Walker, directives []*ast.Directive)) {
+ o.directiveList = append(o.directiveList, f)
+}
+
+// OnValue registers f to be called for every value node, after its children.
+func (o *Events) OnValue(f func(walker *Walker, value *ast.Value)) {
+ o.value = append(o.value, f)
+}
+
+// Walk traverses document, annotating its AST nodes with definitions resolved
+// from schema and firing any callbacks registered on observers. A fresh
+// Walker is built per call, so one Events value can be reused across walks.
+func Walk(schema *ast.Schema, document *ast.QueryDocument, observers *Events) {
+ w := Walker{
+  Observers: observers,
+  Schema: schema,
+  Document: document,
+ }
+
+ w.walk()
+}
+
+// Walker holds the traversal state for a single Walk invocation.
+type Walker struct {
+ Context context.Context
+ Observers *Events
+ Schema *ast.Schema
+ Document *ast.QueryDocument
+
+ // validatedFragmentSpreads records fragments whose selection set has
+ // already been walked, preventing infinite recursion on cyclic spreads.
+ // It is reset for each top-level operation/fragment (see walk).
+ validatedFragmentSpreads map[string]bool
+ // CurrentOperation is non-nil only while inside walkOperation; walkValue
+ // uses it to resolve variable references against the operation's
+ // variable definitions.
+ CurrentOperation *ast.OperationDefinition
+}
+
+// walk visits every operation and then every fragment definition in the
+// document, resetting the fragment-spread guard before each root so spreads
+// are re-validated in each new context.
+func (w *Walker) walk() {
+ for _, child := range w.Document.Operations {
+  w.validatedFragmentSpreads = make(map[string]bool)
+  w.walkOperation(child)
+ }
+ for _, child := range w.Document.Fragments {
+  w.validatedFragmentSpreads = make(map[string]bool)
+  w.walkFragment(child)
+ }
+}
+
+// walkOperation annotates an operation's variable definitions with their
+// schema types, resolves the root object definition and directive location
+// for the operation kind, then walks directives, variable default values,
+// and the selection set before firing the operation observers.
+func (w *Walker) walkOperation(operation *ast.OperationDefinition) {
+ w.CurrentOperation = operation
+ for _, varDef := range operation.VariableDefinitions {
+  varDef.Definition = w.Schema.Types[varDef.Type.Name()]
+
+  if varDef.DefaultValue != nil {
+   varDef.DefaultValue.ExpectedType = varDef.Type
+   varDef.DefaultValue.Definition = w.Schema.Types[varDef.Type.Name()]
+  }
+ }
+
+ var def *ast.Definition
+ var loc ast.DirectiveLocation
+ switch operation.Operation {
+ case ast.Query, "":
+  // An unset operation type defaults to a query, per the shorthand form.
+  def = w.Schema.Query
+  loc = ast.LocationQuery
+ case ast.Mutation:
+  def = w.Schema.Mutation
+  loc = ast.LocationMutation
+ case ast.Subscription:
+  def = w.Schema.Subscription
+  loc = ast.LocationSubscription
+ }
+
+ w.walkDirectives(def, operation.Directives, loc)
+
+ // Default values are walked only after all definitions above are
+ // annotated, so variable references inside them resolve correctly.
+ for _, varDef := range operation.VariableDefinitions {
+  if varDef.DefaultValue != nil {
+   w.walkValue(varDef.DefaultValue)
+  }
+ }
+
+ w.walkSelectionSet(def, operation.SelectionSet)
+
+ // Observers fire last, after the whole subtree has been annotated.
+ for _, v := range w.Observers.operationVisitor {
+  v(w, operation)
+ }
+ w.CurrentOperation = nil
+}
+
+// walkFragment resolves a fragment definition's type condition against the
+// schema (def may be nil for an unknown type — validators report that), walks
+// its directives and selection set, then fires the fragment observers.
+func (w *Walker) walkFragment(it *ast.FragmentDefinition) {
+ def := w.Schema.Types[it.TypeCondition]
+
+ it.Definition = def
+
+ w.walkDirectives(def, it.Directives, ast.LocationFragmentDefinition)
+ w.walkSelectionSet(def, it.SelectionSet)
+
+ for _, v := range w.Observers.fragment {
+  v(w, it)
+ }
+}
+
+// walkDirectives annotates each directive with its schema definition, parent
+// definition, and location, walks its arguments, and fires the per-directive
+// observers. The directive-list observers fire once at the end, even when
+// the list is empty.
+func (w *Walker) walkDirectives(parentDef *ast.Definition, directives []*ast.Directive, location ast.DirectiveLocation) {
+ for _, dir := range directives {
+  def := w.Schema.Directives[dir.Name]
+  dir.Definition = def
+  dir.ParentDefinition = parentDef
+  dir.Location = location
+
+  for _, arg := range dir.Arguments {
+   // argDef stays nil for unknown directives or unknown arguments;
+   // walkArgument tolerates that and validators report it.
+   var argDef *ast.ArgumentDefinition
+   if def != nil {
+    argDef = def.Arguments.ForName(arg.Name)
+   }
+
+   w.walkArgument(argDef, arg)
+  }
+
+  for _, v := range w.Observers.directive {
+   v(w, dir)
+  }
+ }
+
+ for _, v := range w.Observers.directiveList {
+  v(w, directives)
+ }
+}
+
+// walkValue annotates a value node and recurses into its children.
+// Variables are linked to the current operation's variable definitions (and
+// marked Used); object-value children inherit expected types from the input
+// object's field definitions; list-value children inherit the list's element
+// type. Value observers fire after the children have been walked.
+func (w *Walker) walkValue(value *ast.Value) {
+ if value.Kind == ast.Variable && w.CurrentOperation != nil {
+  value.VariableDefinition = w.CurrentOperation.VariableDefinitions.ForName(value.Raw)
+  if value.VariableDefinition != nil {
+   value.VariableDefinition.Used = true
+  }
+ }
+
+ if value.Kind == ast.ObjectValue {
+  for _, child := range value.Children {
+   if value.Definition != nil {
+    fieldDef := value.Definition.Fields.ForName(child.Name)
+    if fieldDef != nil {
+     child.Value.ExpectedType = fieldDef.Type
+     child.Value.Definition = w.Schema.Types[fieldDef.Type.Name()]
+    }
+   }
+   w.walkValue(child.Value)
+  }
+ }
+
+ if value.Kind == ast.ListValue {
+  for _, child := range value.Children {
+   if value.ExpectedType != nil && value.ExpectedType.Elem != nil {
+    child.Value.ExpectedType = value.ExpectedType.Elem
+    // The named definition is the same for the list and its elements.
+    child.Value.Definition = value.Definition
+   }
+
+   w.walkValue(child.Value)
+  }
+ }
+
+ for _, v := range w.Observers.value {
+  v(w, value)
+ }
+}
+
+// walkArgument stamps the argument's value with its expected type and type
+// definition when argDef is known (nil argDef means an unknown argument,
+// left for validators to report), then walks the value itself.
+func (w *Walker) walkArgument(argDef *ast.ArgumentDefinition, arg *ast.Argument) {
+ if argDef != nil {
+  arg.Value.ExpectedType = argDef.Type
+  arg.Value.Definition = w.Schema.Types[argDef.Type.Name()]
+ }
+
+ w.walkValue(arg.Value)
+}
+
+// walkSelectionSet walks each selection in the set against the same parent
+// definition (which may be nil when the parent type could not be resolved).
+func (w *Walker) walkSelectionSet(parentDef *ast.Definition, it ast.SelectionSet) {
+ for _, child := range it {
+  w.walkSelection(parentDef, child)
+ }
+}
+
+// walkSelection dispatches on the concrete selection kind. Fields resolve
+// their definition from the parent type (with a synthetic definition for the
+// __typename meta-field), inline fragments narrow the parent via their type
+// condition, and fragment spreads link to their fragment definition. In every
+// case directives and child selections are walked before the observers fire.
+// Panics on an unknown ast.Selection implementation (programmer error).
+func (w *Walker) walkSelection(parentDef *ast.Definition, it ast.Selection) {
+ switch it := it.(type) {
+ case *ast.Field:
+  var def *ast.FieldDefinition
+  if it.Name == "__typename" {
+   // __typename exists on every type but has no schema entry, so a
+   // synthetic String-typed definition is fabricated here.
+   def = &ast.FieldDefinition{
+    Name: "__typename",
+    Type: ast.NamedType("String", nil),
+   }
+  } else if parentDef != nil {
+   def = parentDef.Fields.ForName(it.Name)
+  }
+
+  it.Definition = def
+  it.ObjectDefinition = parentDef
+
+  // Child selections are resolved against the field's own type; nil when
+  // the field is unknown, which validators then report.
+  var nextParentDef *ast.Definition
+  if def != nil {
+   nextParentDef = w.Schema.Types[def.Type.Name()]
+  }
+
+  for _, arg := range it.Arguments {
+   var argDef *ast.ArgumentDefinition
+   if def != nil {
+    argDef = def.Arguments.ForName(arg.Name)
+   }
+
+   w.walkArgument(argDef, arg)
+  }
+
+  w.walkDirectives(nextParentDef, it.Directives, ast.LocationField)
+  w.walkSelectionSet(nextParentDef, it.SelectionSet)
+
+  for _, v := range w.Observers.field {
+   v(w, it)
+  }
+
+ case *ast.InlineFragment:
+  it.ObjectDefinition = parentDef
+
+  // Without a type condition the inline fragment keeps the parent type.
+  nextParentDef := parentDef
+  if it.TypeCondition != "" {
+   nextParentDef = w.Schema.Types[it.TypeCondition]
+  }
+
+  w.walkDirectives(nextParentDef, it.Directives, ast.LocationInlineFragment)
+  w.walkSelectionSet(nextParentDef, it.SelectionSet)
+
+  for _, v := range w.Observers.inlineFragment {
+   v(w, it)
+  }
+
+ case *ast.FragmentSpread:
+  def := w.Document.Fragments.ForName(it.Name)
+  it.Definition = def
+  it.ObjectDefinition = parentDef
+
+  var nextParentDef *ast.Definition
+  if def != nil {
+   nextParentDef = w.Schema.Types[def.TypeCondition]
+  }
+
+  w.walkDirectives(nextParentDef, it.Directives, ast.LocationFragmentSpread)
+
+  if def != nil && !w.validatedFragmentSpreads[def.Name] {
+   // prevent infinite recursion on cyclic fragment spreads
+   w.validatedFragmentSpreads[def.Name] = true
+   w.walkSelectionSet(nextParentDef, def.SelectionSet)
+  }
+
+  for _, v := range w.Observers.fragmentSpread {
+   v(w, it)
+  }
+
+ default:
+  panic(fmt.Errorf("unsupported %T", it))
+ }
+}