author     Michael Muré <batolettre@gmail.com>   2018-09-14 12:40:31 +0200
committer  Michael Muré <batolettre@gmail.com>   2018-09-14 12:41:59 +0200
commit     b478cd1bcb4756b20f7f4b15fcf81f23e1a60a02 (patch)
tree       8ce232dcab3dd00708f8ba66c334472457e5980d
parent     a3fc9abb921f5ce7084d6ab7473442d0b72b1d78 (diff)
download   git-bug-b478cd1bcb4756b20f7f4b15fcf81f23e1a60a02.tar.gz
graphql: update gqlgen to 0.5.1
fix #6
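
Note: the jump from vektah/gqlgen 0.3.0 to 99designs/gqlgen 0.5.1 changes the generated constructor: NewExecutableSchema now takes a Config struct (Resolvers, Directives, Complexity) instead of a bare ResolverRoot, as the gen_graph.go diff below shows. A minimal caller-side sketch of that change, not the actual git-bug wiring; the wrapper package and helper names are made up for illustration:

// Package example is a hypothetical wrapper, not part of git-bug.
package example

import (
	"github.com/99designs/gqlgen/graphql" // was github.com/vektah/gqlgen/graphql

	"github.com/MichaelMure/git-bug/graphql/graph"
)

// newSchema is a hypothetical helper; rootResolver stands in for whatever
// implements graph.ResolverRoot in the real code base.
func newSchema(rootResolver graph.ResolverRoot) graphql.ExecutableSchema {
	var complexity graph.ComplexityRoot
	// Optional per-field complexity, a feature introduced by this gqlgen
	// release: charge each Bug.comments page proportionally to the page size.
	complexity.Bug.Comments = func(childComplexity int, after, before *string, first, last *int) int {
		pageSize := 10 // assumed default when neither first nor last is given
		if first != nil {
			pageSize = *first
		} else if last != nil {
			pageSize = *last
		}
		return pageSize * childComplexity
	}

	// Before (0.3.0): graph.NewExecutableSchema(rootResolver)
	// After  (0.5.1): the generated constructor takes a Config struct.
	return graph.NewExecutableSchema(graph.Config{
		Resolvers:  rootResolver,
		Complexity: complexity,
	})
}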
-rw-r--r--  Gopkg.lock | 72
-rw-r--r--  Gopkg.toml | 4
-rw-r--r--  commands/webui.go | 2
-rw-r--r--  graphql/gen_graphql.go | 6
-rw-r--r--  graphql/graph/gen_graph.go | 5968
-rw-r--r--  graphql/handler.go | 8
-rw-r--r--  graphql/models/gen_models.go | 13
-rw-r--r--  graphql/schema.graphql | 84
-rw-r--r--  vendor/github.com/99designs/gqlgen/LICENSE (renamed from vendor/github.com/vektah/gqlgen/LICENSE) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/build.go | 214
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/codegen.go | 174
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/config.go (renamed from vendor/github.com/vektah/gqlgen/codegen/config.go) | 71
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/directive.go | 41
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/directive_build.go | 49
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/enum.go (renamed from vendor/github.com/vektah/gqlgen/codegen/enum.go) | 4
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/enum_build.go | 39
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/import.go (renamed from vendor/github.com/vektah/gqlgen/codegen/import.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/import_build.go (renamed from vendor/github.com/vektah/gqlgen/codegen/import_build.go) | 28
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/input_build.go (renamed from vendor/github.com/vektah/gqlgen/codegen/input_build.go) | 38
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/interface.go (renamed from vendor/github.com/vektah/gqlgen/codegen/interface.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/interface_build.go (renamed from vendor/github.com/vektah/gqlgen/codegen/interface_build.go) | 50
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/model.go | 16
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/models_build.go (renamed from vendor/github.com/vektah/gqlgen/codegen/models_build.go) | 29
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/object.go | 464
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/object_build.go | 181
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/args.gotpl | 13
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/data.go | 13
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/field.gotpl | 74
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/generated.gotpl | 263
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/input.gotpl (renamed from vendor/github.com/vektah/gqlgen/codegen/templates/input.gotpl) | 2
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/interface.gotpl (renamed from vendor/github.com/vektah/gqlgen/codegen/templates/interface.gotpl) | 2
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/models.gotpl (renamed from vendor/github.com/vektah/gqlgen/codegen/templates/models.gotpl) | 17
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/object.gotpl (renamed from vendor/github.com/vektah/gqlgen/codegen/templates/object.gotpl) | 29
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/resolver.gotpl | 33
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/server.gotpl | 22
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/templates.go (renamed from vendor/github.com/vektah/gqlgen/codegen/templates/templates.go) | 54
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/type.go (renamed from vendor/github.com/vektah/gqlgen/codegen/type.go) | 22
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/type_build.go (renamed from vendor/github.com/vektah/gqlgen/codegen/type_build.go) | 63
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/util.go (renamed from vendor/github.com/vektah/gqlgen/codegen/util.go) | 93
-rw-r--r--  vendor/github.com/99designs/gqlgen/complexity/complexity.go | 104
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/bool.go (renamed from vendor/github.com/vektah/gqlgen/graphql/bool.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/context.go (renamed from vendor/github.com/vektah/gqlgen/graphql/context.go) | 121
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/error.go | 31
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/exec.go | 135
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/float.go (renamed from vendor/github.com/vektah/gqlgen/graphql/float.go) | 5
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/id.go (renamed from vendor/github.com/vektah/gqlgen/graphql/id.go) | 3
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/int.go (renamed from vendor/github.com/vektah/gqlgen/graphql/int.go) | 5
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/introspection/introspection.go | 58
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/introspection/query.go (renamed from vendor/github.com/vektah/gqlgen/neelance/introspection/query.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/introspection/schema.go | 68
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/introspection/type.go | 174
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/jsonw.go (renamed from vendor/github.com/vektah/gqlgen/graphql/jsonw.go) | 14
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/map.go (renamed from vendor/github.com/vektah/gqlgen/graphql/map.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/oneshot.go (renamed from vendor/github.com/vektah/gqlgen/graphql/oneshot.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/recovery.go (renamed from vendor/github.com/vektah/gqlgen/graphql/recovery.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/response.go (renamed from vendor/github.com/vektah/gqlgen/graphql/response.go) | 6
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/string.go (renamed from vendor/github.com/vektah/gqlgen/graphql/string.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/time.go (renamed from vendor/github.com/vektah/gqlgen/graphql/time.go) | 0
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/version.go | 3
-rw-r--r--  vendor/github.com/99designs/gqlgen/handler/graphql.go (renamed from vendor/github.com/vektah/gqlgen/handler/graphql.go) | 144
-rw-r--r--  vendor/github.com/99designs/gqlgen/handler/playground.go (renamed from vendor/github.com/vektah/gqlgen/handler/playground.go) | 5
-rw-r--r--  vendor/github.com/99designs/gqlgen/handler/stub.go | 51
-rw-r--r--  vendor/github.com/99designs/gqlgen/handler/websocket.go (renamed from vendor/github.com/vektah/gqlgen/handler/websocket.go) | 51
-rw-r--r--  vendor/github.com/99designs/gqlgen/internal/gopath/gopath.go | 37
-rw-r--r--  vendor/github.com/agnivade/levenshtein/.gitignore | 5
-rw-r--r--  vendor/github.com/agnivade/levenshtein/.travis.yml | 7
-rw-r--r--  vendor/github.com/agnivade/levenshtein/License.txt | 21
-rw-r--r--  vendor/github.com/agnivade/levenshtein/Makefile | 13
-rw-r--r--  vendor/github.com/agnivade/levenshtein/README.md | 57
-rw-r--r--  vendor/github.com/agnivade/levenshtein/go.mod | 1
-rw-r--r--  vendor/github.com/agnivade/levenshtein/levenshtein.go | 71
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/.gitignore | 23
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/2q.go | 223
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/LICENSE | 362
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/README.md | 25
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/arc.go | 257
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/doc.go | 21
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/go.mod | 1
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/lru.go | 110
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/simplelru/lru.go | 161
-rw-r--r--  vendor/github.com/hashicorp/golang-lru/simplelru/lru_interface.go | 36
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/build.go | 165
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/codegen.go | 153
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/enum_build.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/model.go | 15
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/object.go | 206
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/object_build.go | 144
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/templates/args.gotpl | 30
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/templates/data.go | 11
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/templates/field.gotpl | 80
-rw-r--r--  vendor/github.com/vektah/gqlgen/codegen/templates/generated.gotpl | 175
-rw-r--r--  vendor/github.com/vektah/gqlgen/graphql/defer.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlgen/graphql/error.go | 46
-rw-r--r--  vendor/github.com/vektah/gqlgen/graphql/exec.go | 118
-rw-r--r--  vendor/github.com/vektah/gqlgen/handler/stub.go | 45
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/LICENSE | 24
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/common/directive.go | 32
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/common/lexer.go | 122
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/common/literals.go | 206
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/common/types.go | 80
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/common/values.go | 77
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/errors/errors.go | 41
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/introspection/introspection.go | 313
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/query/query.go | 261
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/schema/meta.go | 193
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/schema/schema.go | 489
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/validation/suggestion.go | 71
-rw-r--r--  vendor/github.com/vektah/gqlgen/neelance/validation/validation.go | 861
-rw-r--r--  vendor/github.com/vektah/gqlparser/.gitignore | 5
-rw-r--r--  vendor/github.com/vektah/gqlparser/.gometalinter.json | 13
-rw-r--r--  vendor/github.com/vektah/gqlparser/LICENSE | 19
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/argmap.go | 37
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/collections.go | 138
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/definition.go | 92
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/directive.go | 42
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/document.go | 65
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/dumper.go | 159
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/fragment.go | 38
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/operation.go | 29
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/selection.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/source.go | 14
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/type.go | 68
-rw-r--r--  vendor/github.com/vektah/gqlparser/ast/value.go | 120
-rw-r--r--  vendor/github.com/vektah/gqlparser/gqlerror/error.go | 133
-rw-r--r--  vendor/github.com/vektah/gqlparser/gqlparser.go | 42
-rw-r--r--  vendor/github.com/vektah/gqlparser/lexer/blockstring.go | 58
-rw-r--r--  vendor/github.com/vektah/gqlparser/lexer/lexer.go | 510
-rw-r--r--  vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml | 672
-rw-r--r--  vendor/github.com/vektah/gqlparser/lexer/token.go | 148
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/parser.go | 112
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/query.go | 334
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/query_test.yml | 507
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/schema.go | 503
-rw-r--r--  vendor/github.com/vektah/gqlparser/parser/schema_test.yml | 505
-rw-r--r--  vendor/github.com/vektah/gqlparser/readme.md | 17
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/error.go | 55
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/imported/LICENSE (renamed from vendor/github.com/vektah/gqlgen/neelance/tests/testdata/LICENSE) | 0
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/messaging.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/prelude.go | 5
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/prelude.graphql | 119
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go | 86
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go | 39
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go | 57
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go | 31
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go | 19
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go | 61
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go | 19
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go | 93
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go | 28
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go | 553
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go | 68
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go | 63
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go | 36
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go | 30
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go | 33
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go | 24
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go | 22
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go | 27
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go | 22
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go | 23
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go | 130
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go | 28
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go | 36
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/schema.go | 212
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/schema_test.yml | 152
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/suggestionList.go | 69
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/validator.go | 44
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/vars.go | 195
-rw-r--r--  vendor/github.com/vektah/gqlparser/validator/walk.go | 286
171 files changed, 15589 insertions, 6285 deletions
diff --git a/Gopkg.lock b/Gopkg.lock
index 3f023fb5..fc80249d 100644
--- a/Gopkg.lock
+++ b/Gopkg.lock
@@ -2,6 +2,30 @@
[[projects]]
+ digest = "1:2af7a7a1fcb231f9cac066e51c629370834819ac3a776dc0a3f577d28cb1fc28"
+ name = "github.com/99designs/gqlgen"
+ packages = [
+ "codegen",
+ "codegen/templates",
+ "complexity",
+ "graphql",
+ "graphql/introspection",
+ "handler",
+ "internal/gopath",
+ ]
+ pruneopts = "UT"
+ revision = "636435b68700211441303f1a5ed92f3768ba5774"
+ version = "v0.5.1"
+
+[[projects]]
+ digest = "1:897d91c431ce469d35a5e6030e60e617dccd9a0e95bdffa6a80594f5c5800d29"
+ name = "github.com/agnivade/levenshtein"
+ packages = ["."]
+ pruneopts = "UT"
+ revision = "3d21ba515fe27b856f230847e856431ae1724adc"
+ version = "v1.0.0"
+
+[[projects]]
branch = "master"
digest = "1:f438d91be142877c3ad83157992c91de787ddfbddcc2a7da1ef6ef61606cadc4"
name = "github.com/cheekybits/genny"
@@ -66,6 +90,17 @@
version = "v1.2.0"
[[projects]]
+ digest = "1:8ec8d88c248041a6df5f6574b87bc00e7e0b493881dad2e7ef47b11dc69093b5"
+ name = "github.com/hashicorp/golang-lru"
+ packages = [
+ ".",
+ "simplelru",
+ ]
+ pruneopts = "UT"
+ revision = "20f1fb78b0740ba8c3cb143a61e86ba5c8669768"
+ version = "v0.5.0"
+
+[[projects]]
branch = "master"
digest = "1:22725c01ecd8ed0c0f0078944305a57053340d92878b02db925c660cc4accf64"
name = "github.com/icrowley/fake"
@@ -192,23 +227,20 @@
version = "v1.0.1"
[[projects]]
- digest = "1:d1ef15a6bc267ffb9e3ac790591103e99d4662f5867a99f9182c43a2016884e2"
- name = "github.com/vektah/gqlgen"
+ branch = "master"
+ digest = "1:8150271279cc160a41e9aabfee8118c20a0e88894a25b2577f93e7c868e5259c"
+ name = "github.com/vektah/gqlparser"
packages = [
- "codegen",
- "codegen/templates",
- "graphql",
- "handler",
- "neelance/common",
- "neelance/errors",
- "neelance/introspection",
- "neelance/query",
- "neelance/schema",
- "neelance/validation",
+ ".",
+ "ast",
+ "gqlerror",
+ "lexer",
+ "parser",
+ "validator",
+ "validator/rules",
]
pruneopts = "UT"
- revision = "381b34691fd93829e50ba8821412dc3467ec4821"
- version = "0.3.0"
+ revision = "14e83ae06ec152e6d0afb9766a00e0c0918aa8fc"
[[projects]]
branch = "master"
@@ -245,6 +277,10 @@
analyzer-name = "dep"
analyzer-version = 1
input-imports = [
+ "github.com/99designs/gqlgen/codegen",
+ "github.com/99designs/gqlgen/graphql",
+ "github.com/99designs/gqlgen/graphql/introspection",
+ "github.com/99designs/gqlgen/handler",
"github.com/cheekybits/genny/generic",
"github.com/dustin/go-humanize",
"github.com/fatih/color",
@@ -258,12 +294,8 @@
"github.com/skratchdot/open-golang/open",
"github.com/spf13/cobra",
"github.com/spf13/cobra/doc",
- "github.com/vektah/gqlgen/codegen",
- "github.com/vektah/gqlgen/graphql",
- "github.com/vektah/gqlgen/handler",
- "github.com/vektah/gqlgen/neelance/introspection",
- "github.com/vektah/gqlgen/neelance/query",
- "github.com/vektah/gqlgen/neelance/schema",
+ "github.com/vektah/gqlparser",
+ "github.com/vektah/gqlparser/ast",
]
solver-name = "gps-cdcl"
solver-version = 1
diff --git a/Gopkg.toml b/Gopkg.toml
index 19d41fd1..445a12eb 100644
--- a/Gopkg.toml
+++ b/Gopkg.toml
@@ -57,8 +57,8 @@
branch = "master"
[[constraint]]
- name = "github.com/vektah/gqlgen"
- version = "0.3.0"
+ name = "github.com/99designs/gqlgen"
+ version = "0.5.1"
[[constraint]]
name = "github.com/jroimartin/gocui"
diff --git a/commands/webui.go b/commands/webui.go
index d64bca53..d44bebba 100644
--- a/commands/webui.go
+++ b/commands/webui.go
@@ -12,6 +12,7 @@ import (
"os/signal"
"time"
+ "github.com/99designs/gqlgen/handler"
"github.com/MichaelMure/git-bug/graphql"
"github.com/MichaelMure/git-bug/repository"
"github.com/MichaelMure/git-bug/util/git"
@@ -20,7 +21,6 @@ import (
"github.com/phayes/freeport"
"github.com/skratchdot/open-golang/open"
"github.com/spf13/cobra"
- "github.com/vektah/gqlgen/handler"
)
var port int
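
For context, the webui.go hunk above only swaps the handler import path from vektah/gqlgen to 99designs/gqlgen; the call shape of handler.GraphQL and handler.Playground is unchanged. A hedged sketch of how such a handler is typically mounted; the mux paths and the serveWebUI helper are assumptions, not a copy of webui.go:

// Package webui is a hypothetical package name for this sketch.
package webui

import (
	"log"
	"net/http"

	"github.com/99designs/gqlgen/handler" // new import path, same API surface

	"github.com/MichaelMure/git-bug/graphql/graph"
)

// serveWebUI mounts the GraphQL endpoint and its playground on a mux and
// blocks serving HTTP; the route names are illustrative only.
func serveWebUI(addr string, cfg graph.Config) error {
	router := http.NewServeMux()
	router.Handle("/graphql", handler.GraphQL(graph.NewExecutableSchema(cfg)))
	router.Handle("/playground", handler.Playground("git-bug", "/graphql"))

	log.Printf("listening on %s", addr)
	return http.ListenAndServe(addr, router)
}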
diff --git a/graphql/gen_graphql.go b/graphql/gen_graphql.go
index fb0cfa68..84e0cfe0 100644
--- a/graphql/gen_graphql.go
+++ b/graphql/gen_graphql.go
@@ -9,7 +9,7 @@ import (
"os"
"path"
- "github.com/vektah/gqlgen/codegen"
+ "github.com/99designs/gqlgen/codegen"
)
func main() {
@@ -22,9 +22,9 @@ func main() {
fmt.Println("Generating graphql code ...")
- log.SetOutput(ioutil.Discard)
+ log.SetOutput(os.Stdout)
- config, err := codegen.LoadDefaultConfig()
+ config, err := codegen.LoadConfigFromDefaultLocations()
if err != nil {
log.Fatal(err)
}
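
The generator entry point switches from codegen.LoadDefaultConfig to codegen.LoadConfigFromDefaultLocations and stops discarding the generator's log output. A sketch of the resulting entry point; the trailing codegen.Generate call is how this gqlgen series is usually driven, but it is an assumption here since the diff does not show that part of the file:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/99designs/gqlgen/codegen"
)

func main() {
	fmt.Println("Generating graphql code ...")

	// This commit routes the generator's log output to os.Stdout (it was
	// previously sent to ioutil.Discard), so gqlgen warnings become visible.
	log.SetOutput(os.Stdout)

	// Reads gqlgen.yml (or .gqlgen.yml) from the usual locations.
	config, err := codegen.LoadConfigFromDefaultLocations()
	if err != nil {
		log.Fatal(err)
	}

	// Assumed generation step for gqlgen 0.5.x; not shown in this hunk.
	if err := codegen.Generate(*config); err != nil {
		log.Fatal(err)
	}
}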
diff --git a/graphql/graph/gen_graph.go b/graphql/graph/gen_graph.go
index a3ea1738..9243d07a 100644
--- a/graphql/graph/gen_graph.go
+++ b/graphql/graph/gen_graph.go
@@ -1,4 +1,4 @@
-// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.
+// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
package graph
@@ -7,59 +7,32 @@ import (
context "context"
fmt "fmt"
strconv "strconv"
+ sync "sync"
time "time"
+ graphql "github.com/99designs/gqlgen/graphql"
+ introspection "github.com/99designs/gqlgen/graphql/introspection"
bug "github.com/MichaelMure/git-bug/bug"
models "github.com/MichaelMure/git-bug/graphql/models"
operations "github.com/MichaelMure/git-bug/operations"
git "github.com/MichaelMure/git-bug/util/git"
- graphql "github.com/vektah/gqlgen/graphql"
- introspection "github.com/vektah/gqlgen/neelance/introspection"
- query "github.com/vektah/gqlgen/neelance/query"
- schema "github.com/vektah/gqlgen/neelance/schema"
+ gqlparser "github.com/vektah/gqlparser"
+ ast "github.com/vektah/gqlparser/ast"
)
-// MakeExecutableSchema creates an ExecutableSchema from the Resolvers interface.
-func MakeExecutableSchema(resolvers Resolvers) graphql.ExecutableSchema {
- return &executableSchema{resolvers: resolvers}
-}
-
// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
-func NewExecutableSchema(resolvers ResolverRoot) graphql.ExecutableSchema {
- return MakeExecutableSchema(shortMapper{r: resolvers})
+func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
+ return &executableSchema{
+ resolvers: cfg.Resolvers,
+ directives: cfg.Directives,
+ complexity: cfg.Complexity,
+ }
}
-type Resolvers interface {
- AddCommentOperation_date(ctx context.Context, obj *operations.AddCommentOperation) (time.Time, error)
-
- Bug_status(ctx context.Context, obj *bug.Snapshot) (models.Status, error)
-
- Bug_lastEdit(ctx context.Context, obj *bug.Snapshot) (time.Time, error)
- Bug_comments(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.CommentConnection, error)
- Bug_operations(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.OperationConnection, error)
-
- CreateOperation_date(ctx context.Context, obj *operations.CreateOperation) (time.Time, error)
-
- LabelChangeOperation_date(ctx context.Context, obj *operations.LabelChangeOperation) (time.Time, error)
-
- Mutation_newBug(ctx context.Context, repoRef *string, title string, message string, files []git.Hash) (bug.Snapshot, error)
- Mutation_addComment(ctx context.Context, repoRef *string, prefix string, message string, files []git.Hash) (bug.Snapshot, error)
- Mutation_changeLabels(ctx context.Context, repoRef *string, prefix string, added []string, removed []string) (bug.Snapshot, error)
- Mutation_open(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error)
- Mutation_close(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error)
- Mutation_setTitle(ctx context.Context, repoRef *string, prefix string, title string) (bug.Snapshot, error)
- Mutation_commit(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error)
-
- Query_defaultRepository(ctx context.Context) (*models.Repository, error)
- Query_repository(ctx context.Context, id string) (*models.Repository, error)
-
- Repository_allBugs(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int, query *string) (models.BugConnection, error)
- Repository_bug(ctx context.Context, obj *models.Repository, prefix string) (*bug.Snapshot, error)
-
- SetStatusOperation_date(ctx context.Context, obj *operations.SetStatusOperation) (time.Time, error)
- SetStatusOperation_status(ctx context.Context, obj *operations.SetStatusOperation) (models.Status, error)
-
- SetTitleOperation_date(ctx context.Context, obj *operations.SetTitleOperation) (time.Time, error)
+type Config struct {
+ Resolvers ResolverRoot
+ Directives DirectiveRoot
+ Complexity ComplexityRoot
}
type ResolverRoot interface {
@@ -73,6 +46,134 @@ type ResolverRoot interface {
SetStatusOperation() SetStatusOperationResolver
SetTitleOperation() SetTitleOperationResolver
}
+
+type DirectiveRoot struct {
+}
+
+type ComplexityRoot struct {
+ AddCommentOperation struct {
+ Author func(childComplexity int) int
+ Date func(childComplexity int) int
+ Message func(childComplexity int) int
+ Files func(childComplexity int) int
+ }
+
+ Bug struct {
+ Id func(childComplexity int) int
+ HumanId func(childComplexity int) int
+ Status func(childComplexity int) int
+ Title func(childComplexity int) int
+ Labels func(childComplexity int) int
+ Author func(childComplexity int) int
+ CreatedAt func(childComplexity int) int
+ LastEdit func(childComplexity int) int
+ Comments func(childComplexity int, after *string, before *string, first *int, last *int) int
+ Operations func(childComplexity int, after *string, before *string, first *int, last *int) int
+ }
+
+ BugConnection struct {
+ Edges func(childComplexity int) int
+ Nodes func(childComplexity int) int
+ PageInfo func(childComplexity int) int
+ TotalCount func(childComplexity int) int
+ }
+
+ BugEdge struct {
+ Cursor func(childComplexity int) int
+ Node func(childComplexity int) int
+ }
+
+ Comment struct {
+ Author func(childComplexity int) int
+ Message func(childComplexity int) int
+ Files func(childComplexity int) int
+ }
+
+ CommentConnection struct {
+ Edges func(childComplexity int) int
+ Nodes func(childComplexity int) int
+ PageInfo func(childComplexity int) int
+ TotalCount func(childComplexity int) int
+ }
+
+ CommentEdge struct {
+ Cursor func(childComplexity int) int
+ Node func(childComplexity int) int
+ }
+
+ CreateOperation struct {
+ Author func(childComplexity int) int
+ Date func(childComplexity int) int
+ Title func(childComplexity int) int
+ Message func(childComplexity int) int
+ Files func(childComplexity int) int
+ }
+
+ LabelChangeOperation struct {
+ Author func(childComplexity int) int
+ Date func(childComplexity int) int
+ Added func(childComplexity int) int
+ Removed func(childComplexity int) int
+ }
+
+ Mutation struct {
+ NewBug func(childComplexity int, repoRef *string, title string, message string, files []git.Hash) int
+ AddComment func(childComplexity int, repoRef *string, prefix string, message string, files []git.Hash) int
+ ChangeLabels func(childComplexity int, repoRef *string, prefix string, added []string, removed []string) int
+ Open func(childComplexity int, repoRef *string, prefix string) int
+ Close func(childComplexity int, repoRef *string, prefix string) int
+ SetTitle func(childComplexity int, repoRef *string, prefix string, title string) int
+ Commit func(childComplexity int, repoRef *string, prefix string) int
+ }
+
+ OperationConnection struct {
+ Edges func(childComplexity int) int
+ Nodes func(childComplexity int) int
+ PageInfo func(childComplexity int) int
+ TotalCount func(childComplexity int) int
+ }
+
+ OperationEdge struct {
+ Cursor func(childComplexity int) int
+ Node func(childComplexity int) int
+ }
+
+ PageInfo struct {
+ HasNextPage func(childComplexity int) int
+ HasPreviousPage func(childComplexity int) int
+ StartCursor func(childComplexity int) int
+ EndCursor func(childComplexity int) int
+ }
+
+ Person struct {
+ Email func(childComplexity int) int
+ Name func(childComplexity int) int
+ }
+
+ Query struct {
+ DefaultRepository func(childComplexity int) int
+ Repository func(childComplexity int, id string) int
+ }
+
+ Repository struct {
+ AllBugs func(childComplexity int, after *string, before *string, first *int, last *int, query *string) int
+ Bug func(childComplexity int, prefix string) int
+ }
+
+ SetStatusOperation struct {
+ Author func(childComplexity int) int
+ Date func(childComplexity int) int
+ Status func(childComplexity int) int
+ }
+
+ SetTitleOperation struct {
+ Author func(childComplexity int) int
+ Date func(childComplexity int) int
+ Title func(childComplexity int) int
+ Was func(childComplexity int) int
+ }
+}
+
type AddCommentOperationResolver interface {
Date(ctx context.Context, obj *operations.AddCommentOperation) (time.Time, error)
}
@@ -114,107 +215,1149 @@ type SetTitleOperationResolver interface {
Date(ctx context.Context, obj *operations.SetTitleOperation) (time.Time, error)
}
-type shortMapper struct {
- r ResolverRoot
-}
+func field_Bug_comments_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
-func (s shortMapper) AddCommentOperation_date(ctx context.Context, obj *operations.AddCommentOperation) (time.Time, error) {
- return s.r.AddCommentOperation().Date(ctx, obj)
-}
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg1 = &ptr1
+ }
-func (s shortMapper) Bug_status(ctx context.Context, obj *bug.Snapshot) (models.Status, error) {
- return s.r.Bug().Status(ctx, obj)
-}
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ var err error
+ var ptr1 int
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalInt(tmp)
+ arg2 = &ptr1
+ }
-func (s shortMapper) Bug_lastEdit(ctx context.Context, obj *bug.Snapshot) (time.Time, error) {
- return s.r.Bug().LastEdit(ctx, obj)
-}
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ var err error
+ var ptr1 int
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalInt(tmp)
+ arg3 = &ptr1
+ }
-func (s shortMapper) Bug_comments(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.CommentConnection, error) {
- return s.r.Bug().Comments(ctx, obj, after, before, first, last)
-}
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ return args, nil
-func (s shortMapper) Bug_operations(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.OperationConnection, error) {
- return s.r.Bug().Operations(ctx, obj, after, before, first, last)
}
-func (s shortMapper) CreateOperation_date(ctx context.Context, obj *operations.CreateOperation) (time.Time, error) {
- return s.r.CreateOperation().Date(ctx, obj)
-}
+func field_Bug_operations_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
-func (s shortMapper) LabelChangeOperation_date(ctx context.Context, obj *operations.LabelChangeOperation) (time.Time, error) {
- return s.r.LabelChangeOperation().Date(ctx, obj)
-}
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg1 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ var err error
+ var ptr1 int
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalInt(tmp)
+ arg2 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ var err error
+ var ptr1 int
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalInt(tmp)
+ arg3 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ return args, nil
-func (s shortMapper) Mutation_newBug(ctx context.Context, repoRef *string, title string, message string, files []git.Hash) (bug.Snapshot, error) {
- return s.r.Mutation().NewBug(ctx, repoRef, title, message, files)
}
-func (s shortMapper) Mutation_addComment(ctx context.Context, repoRef *string, prefix string, message string, files []git.Hash) (bug.Snapshot, error) {
- return s.r.Mutation().AddComment(ctx, repoRef, prefix, message, files)
+func field_Mutation_newBug_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["title"]; ok {
+ var err error
+ arg1, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["title"] = arg1
+ var arg2 string
+ if tmp, ok := rawArgs["message"]; ok {
+ var err error
+ arg2, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["message"] = arg2
+ var arg3 []git.Hash
+ if tmp, ok := rawArgs["files"]; ok {
+ var err error
+ var rawIf1 []interface{}
+ if tmp != nil {
+ if tmp1, ok := tmp.([]interface{}); ok {
+ rawIf1 = tmp1
+ } else {
+ rawIf1 = []interface{}{tmp}
+ }
+ }
+ arg3 = make([]git.Hash, len(rawIf1))
+ for idx1 := range rawIf1 {
+ err = (&arg3[idx1]).UnmarshalGQL(rawIf1[idx1])
+ }
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["files"] = arg3
+ return args, nil
+
}
-func (s shortMapper) Mutation_changeLabels(ctx context.Context, repoRef *string, prefix string, added []string, removed []string) (bug.Snapshot, error) {
- return s.r.Mutation().ChangeLabels(ctx, repoRef, prefix, added, removed)
+func field_Mutation_addComment_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ var err error
+ arg1, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ var arg2 string
+ if tmp, ok := rawArgs["message"]; ok {
+ var err error
+ arg2, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["message"] = arg2
+ var arg3 []git.Hash
+ if tmp, ok := rawArgs["files"]; ok {
+ var err error
+ var rawIf1 []interface{}
+ if tmp != nil {
+ if tmp1, ok := tmp.([]interface{}); ok {
+ rawIf1 = tmp1
+ } else {
+ rawIf1 = []interface{}{tmp}
+ }
+ }
+ arg3 = make([]git.Hash, len(rawIf1))
+ for idx1 := range rawIf1 {
+ err = (&arg3[idx1]).UnmarshalGQL(rawIf1[idx1])
+ }
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["files"] = arg3
+ return args, nil
+
}
-func (s shortMapper) Mutation_open(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error) {
- return s.r.Mutation().Open(ctx, repoRef, prefix)
+func field_Mutation_changeLabels_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ var err error
+ arg1, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ var arg2 []string
+ if tmp, ok := rawArgs["added"]; ok {
+ var err error
+ var rawIf1 []interface{}
+ if tmp != nil {
+ if tmp1, ok := tmp.([]interface{}); ok {
+ rawIf1 = tmp1
+ } else {
+ rawIf1 = []interface{}{tmp}
+ }
+ }
+ arg2 = make([]string, len(rawIf1))
+ for idx1 := range rawIf1 {
+ arg2[idx1], err = graphql.UnmarshalString(rawIf1[idx1])
+ }
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["added"] = arg2
+ var arg3 []string
+ if tmp, ok := rawArgs["removed"]; ok {
+ var err error
+ var rawIf1 []interface{}
+ if tmp != nil {
+ if tmp1, ok := tmp.([]interface{}); ok {
+ rawIf1 = tmp1
+ } else {
+ rawIf1 = []interface{}{tmp}
+ }
+ }
+ arg3 = make([]string, len(rawIf1))
+ for idx1 := range rawIf1 {
+ arg3[idx1], err = graphql.UnmarshalString(rawIf1[idx1])
+ }
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["removed"] = arg3
+ return args, nil
+
}
-func (s shortMapper) Mutation_close(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error) {
- return s.r.Mutation().Close(ctx, repoRef, prefix)
+func field_Mutation_open_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ var err error
+ arg1, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ return args, nil
+
}
-func (s shortMapper) Mutation_setTitle(ctx context.Context, repoRef *string, prefix string, title string) (bug.Snapshot, error) {
- return s.r.Mutation().SetTitle(ctx, repoRef, prefix, title)
+func field_Mutation_close_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ var err error
+ arg1, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ return args, nil
+
}
-func (s shortMapper) Mutation_commit(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error) {
- return s.r.Mutation().Commit(ctx, repoRef, prefix)
+func field_Mutation_setTitle_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ var err error
+ arg1, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ var arg2 string
+ if tmp, ok := rawArgs["title"]; ok {
+ var err error
+ arg2, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["title"] = arg2
+ return args, nil
+
}
-func (s shortMapper) Query_defaultRepository(ctx context.Context) (*models.Repository, error) {
- return s.r.Query().DefaultRepository(ctx)
+func field_Mutation_commit_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ var err error
+ arg1, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ return args, nil
+
}
-func (s shortMapper) Query_repository(ctx context.Context, id string) (*models.Repository, error) {
- return s.r.Query().Repository(ctx, id)
+func field_Query_repository_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 string
+ if tmp, ok := rawArgs["id"]; ok {
+ var err error
+ arg0, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["id"] = arg0
+ return args, nil
+
}
-func (s shortMapper) Repository_allBugs(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int, query *string) (models.BugConnection, error) {
- return s.r.Repository().AllBugs(ctx, obj, after, before, first, last, query)
+func field_Query___type_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 string
+ if tmp, ok := rawArgs["name"]; ok {
+ var err error
+ arg0, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["name"] = arg0
+ return args, nil
+
}
-func (s shortMapper) Repository_bug(ctx context.Context, obj *models.Repository, prefix string) (*bug.Snapshot, error) {
- return s.r.Repository().Bug(ctx, obj, prefix)
+func field_Repository_allBugs_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg0 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg1 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ var err error
+ var ptr1 int
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalInt(tmp)
+ arg2 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ var err error
+ var ptr1 int
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalInt(tmp)
+ arg3 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ var arg4 *string
+ if tmp, ok := rawArgs["query"]; ok {
+ var err error
+ var ptr1 string
+ if tmp != nil {
+ ptr1, err = graphql.UnmarshalString(tmp)
+ arg4 = &ptr1
+ }
+
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["query"] = arg4
+ return args, nil
+
}
-func (s shortMapper) SetStatusOperation_date(ctx context.Context, obj *operations.SetStatusOperation) (time.Time, error) {
- return s.r.SetStatusOperation().Date(ctx, obj)
+func field_Repository_bug_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ var err error
+ arg0, err = graphql.UnmarshalString(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg0
+ return args, nil
+
}
-func (s shortMapper) SetStatusOperation_status(ctx context.Context, obj *operations.SetStatusOperation) (models.Status, error) {
- return s.r.SetStatusOperation().Status(ctx, obj)
+func field___Type_fields_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 bool
+ if tmp, ok := rawArgs["includeDeprecated"]; ok {
+ var err error
+ arg0, err = graphql.UnmarshalBoolean(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["includeDeprecated"] = arg0
+ return args, nil
+
}
-func (s shortMapper) SetTitleOperation_date(ctx context.Context, obj *operations.SetTitleOperation) (time.Time, error) {
- return s.r.SetTitleOperation().Date(ctx, obj)
+func field___Type_enumValues_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ args := map[string]interface{}{}
+ var arg0 bool
+ if tmp, ok := rawArgs["includeDeprecated"]; ok {
+ var err error
+ arg0, err = graphql.UnmarshalBoolean(tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["includeDeprecated"] = arg0
+ return args, nil
+
}
type executableSchema struct {
- resolvers Resolvers
+ resolvers ResolverRoot
+ directives DirectiveRoot
+ complexity ComplexityRoot
}
-func (e *executableSchema) Schema() *schema.Schema {
+func (e *executableSchema) Schema() *ast.Schema {
return parsedSchema
}
-func (e *executableSchema) Query(ctx context.Context, op *query.Operation) *graphql.Response {
- ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
+func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
+ switch typeName + "." + field {
+
+ case "AddCommentOperation.author":
+ if e.complexity.AddCommentOperation.Author == nil {
+ break
+ }
+
+ return e.complexity.AddCommentOperation.Author(childComplexity), true
+
+ case "AddCommentOperation.date":
+ if e.complexity.AddCommentOperation.Date == nil {
+ break
+ }
+
+ return e.complexity.AddCommentOperation.Date(childComplexity), true
+
+ case "AddCommentOperation.message":
+ if e.complexity.AddCommentOperation.Message == nil {
+ break
+ }
+
+ return e.complexity.AddCommentOperation.Message(childComplexity), true
+
+ case "AddCommentOperation.files":
+ if e.complexity.AddCommentOperation.Files == nil {
+ break
+ }
+
+ return e.complexity.AddCommentOperation.Files(childComplexity), true
+
+ case "Bug.id":
+ if e.complexity.Bug.Id == nil {
+ break
+ }
+
+ return e.complexity.Bug.Id(childComplexity), true
+
+ case "Bug.humanId":
+ if e.complexity.Bug.HumanId == nil {
+ break
+ }
+
+ return e.complexity.Bug.HumanId(childComplexity), true
+
+ case "Bug.status":
+ if e.complexity.Bug.Status == nil {
+ break
+ }
+
+ return e.complexity.Bug.Status(childComplexity), true
+
+ case "Bug.title":
+ if e.complexity.Bug.Title == nil {
+ break
+ }
+
+ return e.complexity.Bug.Title(childComplexity), true
+
+ case "Bug.labels":
+ if e.complexity.Bug.Labels == nil {
+ break
+ }
+
+ return e.complexity.Bug.Labels(childComplexity), true
+
+ case "Bug.author":
+ if e.complexity.Bug.Author == nil {
+ break
+ }
+
+ return e.complexity.Bug.Author(childComplexity), true
+
+ case "Bug.createdAt":
+ if e.complexity.Bug.CreatedAt == nil {
+ break
+ }
+
+ return e.complexity.Bug.CreatedAt(childComplexity), true
+
+ case "Bug.lastEdit":
+ if e.complexity.Bug.LastEdit == nil {
+ break
+ }
+
+ return e.complexity.Bug.LastEdit(childComplexity), true
+
+ case "Bug.comments":
+ if e.complexity.Bug.Comments == nil {
+ break
+ }
+
+ args, err := field_Bug_comments_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Bug.Comments(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
+
+ case "Bug.operations":
+ if e.complexity.Bug.Operations == nil {
+ break
+ }
+
+ args, err := field_Bug_operations_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Bug.Operations(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
+
+ case "BugConnection.edges":
+ if e.complexity.BugConnection.Edges == nil {
+ break
+ }
+
+ return e.complexity.BugConnection.Edges(childComplexity), true
+
+ case "BugConnection.nodes":
+ if e.complexity.BugConnection.Nodes == nil {
+ break
+ }
+
+ return e.complexity.BugConnection.Nodes(childComplexity), true
+
+ case "BugConnection.pageInfo":
+ if e.complexity.BugConnection.PageInfo == nil {
+ break
+ }
+
+ return e.complexity.BugConnection.PageInfo(childComplexity), true
+
+ case "BugConnection.totalCount":
+ if e.complexity.BugConnection.TotalCount == nil {
+ break
+ }
+
+ return e.complexity.BugConnection.TotalCount(childComplexity), true
+
+ case "BugEdge.cursor":
+ if e.complexity.BugEdge.Cursor == nil {
+ break
+ }
+
+ return e.complexity.BugEdge.Cursor(childComplexity), true
+
+ case "BugEdge.node":
+ if e.complexity.BugEdge.Node == nil {
+ break
+ }
+
+ return e.complexity.BugEdge.Node(childComplexity), true
+
+ case "Comment.author":
+ if e.complexity.Comment.Author == nil {
+ break
+ }
+
+ return e.complexity.Comment.Author(childComplexity), true
+
+ case "Comment.message":
+ if e.complexity.Comment.Message == nil {
+ break
+ }
+
+ return e.complexity.Comment.Message(childComplexity), true
+
+ case "Comment.files":
+ if e.complexity.Comment.Files == nil {
+ break
+ }
+
+ return e.complexity.Comment.Files(childComplexity), true
+
+ case "CommentConnection.edges":
+ if e.complexity.CommentConnection.Edges == nil {
+ break
+ }
+
+ return e.complexity.CommentConnection.Edges(childComplexity), true
+
+ case "CommentConnection.nodes":
+ if e.complexity.CommentConnection.Nodes == nil {
+ break
+ }
+
+ return e.complexity.CommentConnection.Nodes(childComplexity), true
+
+ case "CommentConnection.pageInfo":
+ if e.complexity.CommentConnection.PageInfo == nil {
+ break
+ }
+
+ return e.complexity.CommentConnection.PageInfo(childComplexity), true
+
+ case "CommentConnection.totalCount":
+ if e.complexity.CommentConnection.TotalCount == nil {
+ break
+ }
+
+ return e.complexity.CommentConnection.TotalCount(childComplexity), true
+
+ case "CommentEdge.cursor":
+ if e.complexity.CommentEdge.Cursor == nil {
+ break
+ }
+
+ return e.complexity.CommentEdge.Cursor(childComplexity), true
+
+ case "CommentEdge.node":
+ if e.complexity.CommentEdge.Node == nil {
+ break
+ }
+
+ return e.complexity.CommentEdge.Node(childComplexity), true
+
+ case "CreateOperation.author":
+ if e.complexity.CreateOperation.Author == nil {
+ break
+ }
+
+ return e.complexity.CreateOperation.Author(childComplexity), true
+
+ case "CreateOperation.date":
+ if e.complexity.CreateOperation.Date == nil {
+ break
+ }
+
+ return e.complexity.CreateOperation.Date(childComplexity), true
+
+ case "CreateOperation.title":
+ if e.complexity.CreateOperation.Title == nil {
+ break
+ }
+
+ return e.complexity.CreateOperation.Title(childComplexity), true
+
+ case "CreateOperation.message":
+ if e.complexity.CreateOperation.Message == nil {
+ break
+ }
+
+ return e.complexity.CreateOperation.Message(childComplexity), true
+
+ case "CreateOperation.files":
+ if e.complexity.CreateOperation.Files == nil {
+ break
+ }
+
+ return e.complexity.CreateOperation.Files(childComplexity), true
+
+ case "LabelChangeOperation.author":
+ if e.complexity.LabelChangeOperation.Author == nil {
+ break
+ }
+
+ return e.complexity.LabelChangeOperation.Author(childComplexity), true
+
+ case "LabelChangeOperation.date":
+ if e.complexity.LabelChangeOperation.Date == nil {
+ break
+ }
+
+ return e.complexity.LabelChangeOperation.Date(childComplexity), true
+
+ case "LabelChangeOperation.added":
+ if e.complexity.LabelChangeOperation.Added == nil {
+ break
+ }
+
+ return e.complexity.LabelChangeOperation.Added(childComplexity), true
+
+ case "LabelChangeOperation.removed":
+ if e.complexity.LabelChangeOperation.Removed == nil {
+ break
+ }
+
+ return e.complexity.LabelChangeOperation.Removed(childComplexity), true
+
+ case "Mutation.newBug":
+ if e.complexity.Mutation.NewBug == nil {
+ break
+ }
+
+ args, err := field_Mutation_newBug_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Mutation.NewBug(childComplexity, args["repoRef"].(*string), args["title"].(string), args["message"].(string), args["files"].([]git.Hash)), true
+
+ case "Mutation.addComment":
+ if e.complexity.Mutation.AddComment == nil {
+ break
+ }
+
+ args, err := field_Mutation_addComment_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Mutation.AddComment(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["message"].(string), args["files"].([]git.Hash)), true
+
+ case "Mutation.changeLabels":
+ if e.complexity.Mutation.ChangeLabels == nil {
+ break
+ }
+
+ args, err := field_Mutation_changeLabels_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Mutation.ChangeLabels(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["added"].([]string), args["removed"].([]string)), true
+
+ case "Mutation.open":
+ if e.complexity.Mutation.Open == nil {
+ break
+ }
+
+ args, err := field_Mutation_open_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Mutation.Open(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
+
+ case "Mutation.close":
+ if e.complexity.Mutation.Close == nil {
+ break
+ }
+
+ args, err := field_Mutation_close_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Mutation.Close(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
+
+ case "Mutation.setTitle":
+ if e.complexity.Mutation.SetTitle == nil {
+ break
+ }
+
+ args, err := field_Mutation_setTitle_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Mutation.SetTitle(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["title"].(string)), true
+
+ case "Mutation.commit":
+ if e.complexity.Mutation.Commit == nil {
+ break
+ }
+
+ args, err := field_Mutation_commit_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Mutation.Commit(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
+
+ case "OperationConnection.edges":
+ if e.complexity.OperationConnection.Edges == nil {
+ break
+ }
+
+ return e.complexity.OperationConnection.Edges(childComplexity), true
+
+ case "OperationConnection.nodes":
+ if e.complexity.OperationConnection.Nodes == nil {
+ break
+ }
+
+ return e.complexity.OperationConnection.Nodes(childComplexity), true
+
+ case "OperationConnection.pageInfo":
+ if e.complexity.OperationConnection.PageInfo == nil {
+ break
+ }
+
+ return e.complexity.OperationConnection.PageInfo(childComplexity), true
+
+ case "OperationConnection.totalCount":
+ if e.complexity.OperationConnection.TotalCount == nil {
+ break
+ }
+
+ return e.complexity.OperationConnection.TotalCount(childComplexity), true
+
+ case "OperationEdge.cursor":
+ if e.complexity.OperationEdge.Cursor == nil {
+ break
+ }
+
+ return e.complexity.OperationEdge.Cursor(childComplexity), true
+
+ case "OperationEdge.node":
+ if e.complexity.OperationEdge.Node == nil {
+ break
+ }
+
+ return e.complexity.OperationEdge.Node(childComplexity), true
+
+ case "PageInfo.hasNextPage":
+ if e.complexity.PageInfo.HasNextPage == nil {
+ break
+ }
+
+ return e.complexity.PageInfo.HasNextPage(childComplexity), true
+
+ case "PageInfo.hasPreviousPage":
+ if e.complexity.PageInfo.HasPreviousPage == nil {
+ break
+ }
+
+ return e.complexity.PageInfo.HasPreviousPage(childComplexity), true
+
+ case "PageInfo.startCursor":
+ if e.complexity.PageInfo.StartCursor == nil {
+ break
+ }
+
+ return e.complexity.PageInfo.StartCursor(childComplexity), true
+
+ case "PageInfo.endCursor":
+ if e.complexity.PageInfo.EndCursor == nil {
+ break
+ }
+
+ return e.complexity.PageInfo.EndCursor(childComplexity), true
+
+ case "Person.email":
+ if e.complexity.Person.Email == nil {
+ break
+ }
+
+ return e.complexity.Person.Email(childComplexity), true
+
+ case "Person.name":
+ if e.complexity.Person.Name == nil {
+ break
+ }
+
+ return e.complexity.Person.Name(childComplexity), true
+
+ case "Query.defaultRepository":
+ if e.complexity.Query.DefaultRepository == nil {
+ break
+ }
+
+ return e.complexity.Query.DefaultRepository(childComplexity), true
+
+ case "Query.repository":
+ if e.complexity.Query.Repository == nil {
+ break
+ }
+
+ args, err := field_Query_repository_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Query.Repository(childComplexity, args["id"].(string)), true
+
+ case "Repository.allBugs":
+ if e.complexity.Repository.AllBugs == nil {
+ break
+ }
+
+ args, err := field_Repository_allBugs_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Repository.AllBugs(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int), args["query"].(*string)), true
+
+ case "Repository.bug":
+ if e.complexity.Repository.Bug == nil {
+ break
+ }
+
+ args, err := field_Repository_bug_args(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+
+ return e.complexity.Repository.Bug(childComplexity, args["prefix"].(string)), true
+
+ case "SetStatusOperation.author":
+ if e.complexity.SetStatusOperation.Author == nil {
+ break
+ }
+
+ return e.complexity.SetStatusOperation.Author(childComplexity), true
+
+ case "SetStatusOperation.date":
+ if e.complexity.SetStatusOperation.Date == nil {
+ break
+ }
+
+ return e.complexity.SetStatusOperation.Date(childComplexity), true
+
+ case "SetStatusOperation.status":
+ if e.complexity.SetStatusOperation.Status == nil {
+ break
+ }
+
+ return e.complexity.SetStatusOperation.Status(childComplexity), true
+
+ case "SetTitleOperation.author":
+ if e.complexity.SetTitleOperation.Author == nil {
+ break
+ }
+
+ return e.complexity.SetTitleOperation.Author(childComplexity), true
+
+ case "SetTitleOperation.date":
+ if e.complexity.SetTitleOperation.Date == nil {
+ break
+ }
+
+ return e.complexity.SetTitleOperation.Date(childComplexity), true
+
+ case "SetTitleOperation.title":
+ if e.complexity.SetTitleOperation.Title == nil {
+ break
+ }
+
+ return e.complexity.SetTitleOperation.Title(childComplexity), true
+
+ case "SetTitleOperation.was":
+ if e.complexity.SetTitleOperation.Was == nil {
+ break
+ }
+
+ return e.complexity.SetTitleOperation.Was(childComplexity), true
+
+ }
+ return 0, false
+}
+
+func (e *executableSchema) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
+ ec := executionContext{graphql.GetRequestContext(ctx), e}
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- data := ec._Query(ctx, op.Selections)
+ data := ec._Query(ctx, op.SelectionSet)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return buf.Bytes()
@@ -226,11 +1369,11 @@ func (e *executableSchema) Query(ctx context.Context, op *query.Operation) *grap
}
}
-func (e *executableSchema) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {
- ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
+func (e *executableSchema) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
+ ec := executionContext{graphql.GetRequestContext(ctx), e}
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- data := ec._Mutation(ctx, op.Selections)
+ data := ec._Mutation(ctx, op.SelectionSet)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return buf.Bytes()
@@ -242,23 +1385,24 @@ func (e *executableSchema) Mutation(ctx context.Context, op *query.Operation) *g
}
}
-func (e *executableSchema) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {
+func (e *executableSchema) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {
return graphql.OneShot(graphql.ErrorResponse(ctx, "subscriptions are not supported"))
}
type executionContext struct {
*graphql.RequestContext
-
- resolvers Resolvers
+ *executableSchema
}
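
With this update the Query/Mutation/Subscription entry points take an *ast.OperationDefinition (the gqlparser AST) instead of the old *query.Operation, execute op.SelectionSet, and the execution context embeds *executableSchema directly rather than carrying a separate Resolvers value. As an illustrative aside, serving the regenerated schema could look roughly like the sketch below; the handler package is gqlgen's, while the graph import path, the ResolverRoot/NewExecutableSchema/Config names and the port are assumptions based on what gqlgen 0.5.x generates rather than anything shown in this hunk.

    // Sketch only; wire in the project's real resolver implementation.
    package main

    import (
        "log"
        "net/http"

        "github.com/99designs/gqlgen/handler"

        "github.com/MichaelMure/git-bug/graphql/graph"
    )

    func main() {
        var root graph.ResolverRoot // placeholder: the concrete resolvers live elsewhere in the project
        exec := graph.NewExecutableSchema(graph.Config{Resolvers: root})
        http.Handle("/graphql", handler.GraphQL(exec))
        log.Fatal(http.ListenAndServe(":3000", nil))
    }
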
var addCommentOperationImplementors = []string{"AddCommentOperation", "Operation", "Authored"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _AddCommentOperation(ctx context.Context, sel []query.Selection, obj *operations.AddCommentOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, addCommentOperationImplementors, ec.Variables)
+func (ec *executionContext) _AddCommentOperation(ctx context.Context, sel ast.SelectionSet, obj *operations.AddCommentOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, addCommentOperationImplementors)
+ var wg sync.WaitGroup
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -267,99 +1411,146 @@ func (ec *executionContext) _AddCommentOperation(ctx context.Context, sel []quer
out.Values[i] = graphql.MarshalString("AddCommentOperation")
case "author":
out.Values[i] = ec._AddCommentOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "date":
- out.Values[i] = ec._AddCommentOperation_date(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._AddCommentOperation_date(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "message":
out.Values[i] = ec._AddCommentOperation_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "files":
out.Values[i] = ec._AddCommentOperation_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ wg.Wait()
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _AddCommentOperation_author(ctx context.Context, field graphql.CollectedField, obj *operations.AddCommentOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "AddCommentOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Author
+ rctx := &graphql.ResolverContext{
+ Object: "AddCommentOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Author, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Person)
+ rctx.Result = res
+
return ec._Person(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _AddCommentOperation_date(ctx context.Context, field graphql.CollectedField, obj *operations.AddCommentOperation) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "AddCommentOperation",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.AddCommentOperation().Date(ctx, obj)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.AddCommentOperation_date(ctx, obj)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(time.Time)
- return graphql.MarshalTime(res)
- })
+ return graphql.Null
+ }
+ res := resTmp.(time.Time)
+ rctx.Result = res
+ return graphql.MarshalTime(res)
}
+// nolint: vetshadow
func (ec *executionContext) _AddCommentOperation_message(ctx context.Context, field graphql.CollectedField, obj *operations.AddCommentOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "AddCommentOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Message
+ rctx := &graphql.ResolverContext{
+ Object: "AddCommentOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Message, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _AddCommentOperation_files(ctx context.Context, field graphql.CollectedField, obj *operations.AddCommentOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "AddCommentOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Files
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "AddCommentOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Files, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]git.Hash)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
+ arr1[idx1] = func() graphql.Marshaler {
return res[idx1]
- }())
+ }()
}
+
return arr1
}
var bugImplementors = []string{"Bug"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Bug(ctx context.Context, sel []query.Selection, obj *bug.Snapshot) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, bugImplementors, ec.Variables)
+func (ec *executionContext) _Bug(ctx context.Context, sel ast.SelectionSet, obj *bug.Snapshot) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, bugImplementors)
+ var wg sync.WaitGroup
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -368,356 +1559,333 @@ func (ec *executionContext) _Bug(ctx context.Context, sel []query.Selection, obj
out.Values[i] = graphql.MarshalString("Bug")
case "id":
out.Values[i] = ec._Bug_id(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "humanId":
out.Values[i] = ec._Bug_humanId(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "status":
- out.Values[i] = ec._Bug_status(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._Bug_status(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "title":
out.Values[i] = ec._Bug_title(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "labels":
out.Values[i] = ec._Bug_labels(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "author":
out.Values[i] = ec._Bug_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "createdAt":
out.Values[i] = ec._Bug_createdAt(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "lastEdit":
- out.Values[i] = ec._Bug_lastEdit(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._Bug_lastEdit(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "comments":
- out.Values[i] = ec._Bug_comments(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._Bug_comments(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "operations":
- out.Values[i] = ec._Bug_operations(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._Bug_operations(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ wg.Wait()
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_id(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Bug"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Id()
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Id(), nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_humanId(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Bug"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.HumanId()
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.HumanId(), nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_status(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "Bug",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Bug().Status(ctx, obj)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Bug_status(ctx, obj)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(models.Status)
- return res
- })
+ return graphql.Null
+ }
+ res := resTmp.(models.Status)
+ rctx.Result = res
+ return res
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_title(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Bug"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Title
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Title, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_labels(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Bug"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Labels
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Labels, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]bug.Label)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
+ arr1[idx1] = func() graphql.Marshaler {
return res[idx1]
- }())
+ }()
}
+
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_author(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Bug"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Author
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Author, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Person)
+ rctx.Result = res
+
return ec._Person(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_createdAt(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Bug"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.CreatedAt
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.CreatedAt, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(time.Time)
+ rctx.Result = res
return graphql.MarshalTime(res)
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_lastEdit(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "Bug",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Bug().LastEdit(ctx, obj)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Bug_lastEdit(ctx, obj)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(time.Time)
- return graphql.MarshalTime(res)
- })
+ return graphql.Null
+ }
+ res := resTmp.(time.Time)
+ rctx.Result = res
+ return graphql.MarshalTime(res)
}
+// nolint: vetshadow
func (ec *executionContext) _Bug_comments(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := field.Args["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := field.Args["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := field.Args["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Bug_comments_args(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
}
- args["last"] = arg3
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "Bug",
Args: args,
Field: field,
- })
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Bug_comments(ctx, obj, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int))
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
- }
- res := resTmp.(models.CommentConnection)
- return ec._CommentConnection(ctx, field.Selections, &res)
- })
-}
-
-func (ec *executionContext) _Bug_operations(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
}
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := field.Args["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Bug().Comments(ctx, obj, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int))
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
+ return graphql.Null
}
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := field.Args["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
+ res := resTmp.(models.CommentConnection)
+ rctx.Result = res
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := field.Args["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
+ return ec._CommentConnection(ctx, field.Selections, &res)
+}
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
+// nolint: vetshadow
+func (ec *executionContext) _Bug_operations(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Bug_operations_args(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
}
- args["last"] = arg3
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "Bug",
Args: args,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Bug().Operations(ctx, obj, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int))
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Bug_operations(ctx, obj, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int))
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(models.OperationConnection)
- return ec._OperationConnection(ctx, field.Selections, &res)
- })
+ return graphql.Null
+ }
+ res := resTmp.(models.OperationConnection)
+ rctx.Result = res
+
+ return ec._OperationConnection(ctx, field.Selections, &res)
}
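
Resolver access also changes shape in these field functions: the flat Bug_comments / Bug_operations methods are replaced by per-type resolver interfaces reached through ec.resolvers.Bug().Comments(...) and .Operations(...), with arguments decoded by the generated field_Bug_*_args helpers and errors surfaced through FieldMiddleware. Illustratively, a resolver method matching the call and type assertion in _Bug_comments above could look like the sketch below; only the signature follows from the generated call, the bugResolver receiver and the pagination note are assumptions.

    // Sketch only, not part of the generated file.
    func (r bugResolver) Comments(ctx context.Context, obj *bug.Snapshot,
        after *string, before *string, first *int, last *int) (models.CommentConnection, error) {
        // A real implementation would apply Relay-style pagination over the snapshot's
        // comments and fill Edges, Nodes, PageInfo and TotalCount.
        return models.CommentConnection{}, nil
    }
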
var bugConnectionImplementors = []string{"BugConnection"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _BugConnection(ctx context.Context, sel []query.Selection, obj *models.BugConnection) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, bugConnectionImplementors, ec.Variables)
+func (ec *executionContext) _BugConnection(ctx context.Context, sel ast.SelectionSet, obj *models.BugConnection) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, bugConnectionImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -726,89 +1894,198 @@ func (ec *executionContext) _BugConnection(ctx context.Context, sel []query.Sele
out.Values[i] = graphql.MarshalString("BugConnection")
case "edges":
out.Values[i] = ec._BugConnection_edges(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "nodes":
out.Values[i] = ec._BugConnection_nodes(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "pageInfo":
out.Values[i] = ec._BugConnection_pageInfo(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "totalCount":
out.Values[i] = ec._BugConnection_totalCount(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _BugConnection_edges(ctx context.Context, field graphql.CollectedField, obj *models.BugConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "BugConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Edges
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "BugConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Edges, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]models.BugEdge)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec._BugEdge(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec._BugEdge(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) _BugConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *models.BugConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "BugConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Nodes
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "BugConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Nodes, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]bug.Snapshot)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec._Bug(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec._Bug(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) _BugConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *models.BugConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "BugConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.PageInfo
+ rctx := &graphql.ResolverContext{
+ Object: "BugConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.PageInfo, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(models.PageInfo)
+ rctx.Result = res
+
return ec._PageInfo(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _BugConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *models.BugConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "BugConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.TotalCount
+ rctx := &graphql.ResolverContext{
+ Object: "BugConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.TotalCount, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(int)
+ rctx.Result = res
return graphql.MarshalInt(res)
}
var bugEdgeImplementors = []string{"BugEdge"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _BugEdge(ctx context.Context, sel []query.Selection, obj *models.BugEdge) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, bugEdgeImplementors, ec.Variables)
+func (ec *executionContext) _BugEdge(ctx context.Context, sel ast.SelectionSet, obj *models.BugEdge) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, bugEdgeImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -817,45 +2094,78 @@ func (ec *executionContext) _BugEdge(ctx context.Context, sel []query.Selection,
out.Values[i] = graphql.MarshalString("BugEdge")
case "cursor":
out.Values[i] = ec._BugEdge_cursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "node":
out.Values[i] = ec._BugEdge_node(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _BugEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *models.BugEdge) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "BugEdge"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Cursor
+ rctx := &graphql.ResolverContext{
+ Object: "BugEdge",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Cursor, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _BugEdge_node(ctx context.Context, field graphql.CollectedField, obj *models.BugEdge) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "BugEdge"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Node
+ rctx := &graphql.ResolverContext{
+ Object: "BugEdge",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Node, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Snapshot)
+ rctx.Result = res
+
return ec._Bug(ctx, field.Selections, &res)
}
var commentImplementors = []string{"Comment", "Authored"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Comment(ctx context.Context, sel []query.Selection, obj *bug.Comment) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, commentImplementors, ec.Variables)
+func (ec *executionContext) _Comment(ctx context.Context, sel ast.SelectionSet, obj *bug.Comment) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, commentImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -864,67 +2174,114 @@ func (ec *executionContext) _Comment(ctx context.Context, sel []query.Selection,
out.Values[i] = graphql.MarshalString("Comment")
case "author":
out.Values[i] = ec._Comment_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "message":
out.Values[i] = ec._Comment_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "files":
out.Values[i] = ec._Comment_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _Comment_author(ctx context.Context, field graphql.CollectedField, obj *bug.Comment) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Comment"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Author
+ rctx := &graphql.ResolverContext{
+ Object: "Comment",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Author, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Person)
+ rctx.Result = res
+
return ec._Person(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _Comment_message(ctx context.Context, field graphql.CollectedField, obj *bug.Comment) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Comment"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Message
+ rctx := &graphql.ResolverContext{
+ Object: "Comment",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Message, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _Comment_files(ctx context.Context, field graphql.CollectedField, obj *bug.Comment) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Comment"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Files
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "Comment",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Files, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]git.Hash)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
+ arr1[idx1] = func() graphql.Marshaler {
return res[idx1]
- }())
+ }()
}
+
return arr1
}
var commentConnectionImplementors = []string{"CommentConnection"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _CommentConnection(ctx context.Context, sel []query.Selection, obj *models.CommentConnection) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, commentConnectionImplementors, ec.Variables)
+func (ec *executionContext) _CommentConnection(ctx context.Context, sel ast.SelectionSet, obj *models.CommentConnection) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, commentConnectionImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -933,89 +2290,198 @@ func (ec *executionContext) _CommentConnection(ctx context.Context, sel []query.
out.Values[i] = graphql.MarshalString("CommentConnection")
case "edges":
out.Values[i] = ec._CommentConnection_edges(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "nodes":
out.Values[i] = ec._CommentConnection_nodes(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "pageInfo":
out.Values[i] = ec._CommentConnection_pageInfo(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "totalCount":
out.Values[i] = ec._CommentConnection_totalCount(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _CommentConnection_edges(ctx context.Context, field graphql.CollectedField, obj *models.CommentConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CommentConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Edges
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "CommentConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Edges, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]models.CommentEdge)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec._CommentEdge(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec._CommentEdge(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) _CommentConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *models.CommentConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CommentConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Nodes
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "CommentConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Nodes, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]bug.Comment)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec._Comment(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec._Comment(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) _CommentConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *models.CommentConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CommentConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.PageInfo
+ rctx := &graphql.ResolverContext{
+ Object: "CommentConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.PageInfo, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(models.PageInfo)
+ rctx.Result = res
+
return ec._PageInfo(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _CommentConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *models.CommentConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CommentConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.TotalCount
+ rctx := &graphql.ResolverContext{
+ Object: "CommentConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.TotalCount, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(int)
+ rctx.Result = res
return graphql.MarshalInt(res)
}
var commentEdgeImplementors = []string{"CommentEdge"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _CommentEdge(ctx context.Context, sel []query.Selection, obj *models.CommentEdge) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, commentEdgeImplementors, ec.Variables)
+func (ec *executionContext) _CommentEdge(ctx context.Context, sel ast.SelectionSet, obj *models.CommentEdge) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, commentEdgeImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -1024,45 +2490,79 @@ func (ec *executionContext) _CommentEdge(ctx context.Context, sel []query.Select
out.Values[i] = graphql.MarshalString("CommentEdge")
case "cursor":
out.Values[i] = ec._CommentEdge_cursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "node":
out.Values[i] = ec._CommentEdge_node(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _CommentEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *models.CommentEdge) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CommentEdge"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Cursor
+ rctx := &graphql.ResolverContext{
+ Object: "CommentEdge",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Cursor, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _CommentEdge_node(ctx context.Context, field graphql.CollectedField, obj *models.CommentEdge) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CommentEdge"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Node
+ rctx := &graphql.ResolverContext{
+ Object: "CommentEdge",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Node, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Comment)
+ rctx.Result = res
+
return ec._Comment(ctx, field.Selections, &res)
}
var createOperationImplementors = []string{"CreateOperation", "Operation", "Authored"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _CreateOperation(ctx context.Context, sel []query.Selection, obj *operations.CreateOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, createOperationImplementors, ec.Variables)
+func (ec *executionContext) _CreateOperation(ctx context.Context, sel ast.SelectionSet, obj *operations.CreateOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, createOperationImplementors)
+ var wg sync.WaitGroup
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -1071,112 +2571,173 @@ func (ec *executionContext) _CreateOperation(ctx context.Context, sel []query.Se
out.Values[i] = graphql.MarshalString("CreateOperation")
case "author":
out.Values[i] = ec._CreateOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "date":
- out.Values[i] = ec._CreateOperation_date(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._CreateOperation_date(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "title":
out.Values[i] = ec._CreateOperation_title(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "message":
out.Values[i] = ec._CreateOperation_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "files":
out.Values[i] = ec._CreateOperation_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ wg.Wait()
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _CreateOperation_author(ctx context.Context, field graphql.CollectedField, obj *operations.CreateOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CreateOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Author
+ rctx := &graphql.ResolverContext{
+ Object: "CreateOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Author, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Person)
+ rctx.Result = res
+
return ec._Person(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _CreateOperation_date(ctx context.Context, field graphql.CollectedField, obj *operations.CreateOperation) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "CreateOperation",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.CreateOperation().Date(ctx, obj)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.CreateOperation_date(ctx, obj)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(time.Time)
- return graphql.MarshalTime(res)
- })
+ return graphql.Null
+ }
+ res := resTmp.(time.Time)
+ rctx.Result = res
+ return graphql.MarshalTime(res)
}
+// nolint: vetshadow
func (ec *executionContext) _CreateOperation_title(ctx context.Context, field graphql.CollectedField, obj *operations.CreateOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CreateOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Title
+ rctx := &graphql.ResolverContext{
+ Object: "CreateOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Title, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _CreateOperation_message(ctx context.Context, field graphql.CollectedField, obj *operations.CreateOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CreateOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Message
+ rctx := &graphql.ResolverContext{
+ Object: "CreateOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Message, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _CreateOperation_files(ctx context.Context, field graphql.CollectedField, obj *operations.CreateOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "CreateOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Files
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "CreateOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Files, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]git.Hash)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
+ arr1[idx1] = func() graphql.Marshaler {
return res[idx1]
- }())
+ }()
}
+
return arr1
}
var labelChangeOperationImplementors = []string{"LabelChangeOperation", "Operation", "Authored"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _LabelChangeOperation(ctx context.Context, sel []query.Selection, obj *operations.LabelChangeOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, labelChangeOperationImplementors, ec.Variables)
+func (ec *executionContext) _LabelChangeOperation(ctx context.Context, sel ast.SelectionSet, obj *operations.LabelChangeOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, labelChangeOperationImplementors)
+ var wg sync.WaitGroup
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -1185,112 +2746,158 @@ func (ec *executionContext) _LabelChangeOperation(ctx context.Context, sel []que
out.Values[i] = graphql.MarshalString("LabelChangeOperation")
case "author":
out.Values[i] = ec._LabelChangeOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "date":
- out.Values[i] = ec._LabelChangeOperation_date(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._LabelChangeOperation_date(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "added":
out.Values[i] = ec._LabelChangeOperation_added(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "removed":
out.Values[i] = ec._LabelChangeOperation_removed(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ wg.Wait()
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_author(ctx context.Context, field graphql.CollectedField, obj *operations.LabelChangeOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "LabelChangeOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Author
+ rctx := &graphql.ResolverContext{
+ Object: "LabelChangeOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Author, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Person)
+ rctx.Result = res
+
return ec._Person(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_date(ctx context.Context, field graphql.CollectedField, obj *operations.LabelChangeOperation) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "LabelChangeOperation",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.LabelChangeOperation().Date(ctx, obj)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.LabelChangeOperation_date(ctx, obj)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(time.Time)
- return graphql.MarshalTime(res)
- })
+ return graphql.Null
+ }
+ res := resTmp.(time.Time)
+ rctx.Result = res
+ return graphql.MarshalTime(res)
}
+// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_added(ctx context.Context, field graphql.CollectedField, obj *operations.LabelChangeOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "LabelChangeOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Added
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "LabelChangeOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Added, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]bug.Label)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
+ arr1[idx1] = func() graphql.Marshaler {
return res[idx1]
- }())
+ }()
}
+
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_removed(ctx context.Context, field graphql.CollectedField, obj *operations.LabelChangeOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "LabelChangeOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Removed
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "LabelChangeOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Removed, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]bug.Label)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
+ arr1[idx1] = func() graphql.Marshaler {
return res[idx1]
- }())
+ }()
}
+
return arr1
}
var mutationImplementors = []string{"Mutation"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Mutation(ctx context.Context, sel []query.Selection) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, mutationImplementors, ec.Variables)
+func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, mutationImplementors)
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
Object: "Mutation",
})
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -1299,468 +2906,261 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel []query.Selection
out.Values[i] = graphql.MarshalString("Mutation")
case "newBug":
out.Values[i] = ec._Mutation_newBug(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "addComment":
out.Values[i] = ec._Mutation_addComment(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "changeLabels":
out.Values[i] = ec._Mutation_changeLabels(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "open":
out.Values[i] = ec._Mutation_open(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "close":
out.Values[i] = ec._Mutation_close(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "setTitle":
out.Values[i] = ec._Mutation_setTitle(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "commit":
out.Values[i] = ec._Mutation_commit(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
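
The new invalid flag gives the object marshalers GraphQL's null-bubbling behaviour: if any non-nullable selection (here every Mutation field) comes back null, the whole object is rendered as null instead of a partial map. A rough sketch, assuming hypothetical marshaler types:

package main

import "fmt"

type marshaler interface{ isNull() bool }

type null struct{}

func (null) isNull() bool { return true }

type str string

func (str) isNull() bool { return false }

type object map[string]marshaler

func (object) isNull() bool { return false }

// marshalObject treats every field in this sketch as non-nullable, so a single
// null field invalidates the whole object and the parent receives null.
func marshalObject(fields object) marshaler {
	invalid := false
	for _, v := range fields {
		if v.isNull() {
			invalid = true
		}
	}
	if invalid {
		return null{}
	}
	return fields
}

func main() {
	ok := object{"newBug": str("snapshot"), "commit": str("snapshot")}
	fmt.Println(marshalObject(ok).isNull()) // false: all fields resolved

	broken := object{"newBug": null{}, "commit": str("snapshot")}
	fmt.Println(marshalObject(broken).isNull()) // true: null bubbles up
}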
+// nolint: vetshadow
func (ec *executionContext) _Mutation_newBug(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := field.Args["title"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["title"] = arg1
- var arg2 string
- if tmp, ok := field.Args["message"]; ok {
- var err error
- arg2, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["message"] = arg2
- var arg3 []git.Hash
- if tmp, ok := field.Args["files"]; ok {
- var err error
- var rawIf1 []interface{}
- if tmp != nil {
- if tmp1, ok := tmp.([]interface{}); ok {
- rawIf1 = tmp1
- }
- }
- arg3 = make([]git.Hash, len(rawIf1))
- for idx1 := range rawIf1 {
- err = (&arg3[idx1]).UnmarshalGQL(rawIf1[idx1])
- }
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["files"] = arg3
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Mutation"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Mutation_newBug(ctx, args["repoRef"].(*string), args["title"].(string), args["message"].(string), args["files"].([]git.Hash))
- })
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Mutation_newBug_args(rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Mutation().NewBug(ctx, args["repoRef"].(*string), args["title"].(string), args["message"].(string), args["files"].([]git.Hash))
+ })
if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
return graphql.Null
}
res := resTmp.(bug.Snapshot)
+ rctx.Result = res
+
return ec._Bug(ctx, field.Selections, &res)
}
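
Argument decoding has moved out of each field function into generated field_Mutation_*_args helpers that work on field.ArgumentMap(ec.Variables). A hedged sketch of what such a helper does, with a hypothetical name and only the repoRef/title arguments:

package main

import "fmt"

// fieldMutationNewBugArgs is a stand-in for the generated
// field_Mutation_newBug_args helper: it converts the raw argument map into
// typed values once, so the field function only handles the error path.
func fieldMutationNewBugArgs(rawArgs map[string]interface{}) (map[string]interface{}, error) {
	args := map[string]interface{}{}

	// Optional repoRef: decoded into *string only when present and non-nil.
	var repoRef *string
	if tmp, ok := rawArgs["repoRef"]; ok && tmp != nil {
		s, ok := tmp.(string)
		if !ok {
			return nil, fmt.Errorf("repoRef: expected string, got %T", tmp)
		}
		repoRef = &s
	}
	args["repoRef"] = repoRef

	// Required title: zero value if absent, error if the wrong type.
	var title string
	if tmp, ok := rawArgs["title"]; ok {
		s, ok := tmp.(string)
		if !ok {
			return nil, fmt.Errorf("title: expected string, got %T", tmp)
		}
		title = s
	}
	args["title"] = title

	return args, nil
}

func main() {
	args, err := fieldMutationNewBugArgs(map[string]interface{}{"title": "crash on start"})
	fmt.Println(args["title"], args["repoRef"], err) // crash on start <nil> <nil>
}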
+// nolint: vetshadow
func (ec *executionContext) _Mutation_addComment(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := field.Args["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["prefix"] = arg1
- var arg2 string
- if tmp, ok := field.Args["message"]; ok {
- var err error
- arg2, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["message"] = arg2
- var arg3 []git.Hash
- if tmp, ok := field.Args["files"]; ok {
- var err error
- var rawIf1 []interface{}
- if tmp != nil {
- if tmp1, ok := tmp.([]interface{}); ok {
- rawIf1 = tmp1
- }
- }
- arg3 = make([]git.Hash, len(rawIf1))
- for idx1 := range rawIf1 {
- err = (&arg3[idx1]).UnmarshalGQL(rawIf1[idx1])
- }
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["files"] = arg3
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Mutation"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Mutation_addComment(ctx, args["repoRef"].(*string), args["prefix"].(string), args["message"].(string), args["files"].([]git.Hash))
- })
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Mutation_addComment_args(rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Mutation().AddComment(ctx, args["repoRef"].(*string), args["prefix"].(string), args["message"].(string), args["files"].([]git.Hash))
+ })
if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
return graphql.Null
}
res := resTmp.(bug.Snapshot)
+ rctx.Result = res
+
return ec._Bug(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _Mutation_changeLabels(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := field.Args["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["prefix"] = arg1
- var arg2 []string
- if tmp, ok := field.Args["added"]; ok {
- var err error
- var rawIf1 []interface{}
- if tmp != nil {
- if tmp1, ok := tmp.([]interface{}); ok {
- rawIf1 = tmp1
- }
- }
- arg2 = make([]string, len(rawIf1))
- for idx1 := range rawIf1 {
- arg2[idx1], err = graphql.UnmarshalString(rawIf1[idx1])
- }
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["added"] = arg2
- var arg3 []string
- if tmp, ok := field.Args["removed"]; ok {
- var err error
- var rawIf1 []interface{}
- if tmp != nil {
- if tmp1, ok := tmp.([]interface{}); ok {
- rawIf1 = tmp1
- }
- }
- arg3 = make([]string, len(rawIf1))
- for idx1 := range rawIf1 {
- arg3[idx1], err = graphql.UnmarshalString(rawIf1[idx1])
- }
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["removed"] = arg3
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Mutation"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Mutation_changeLabels(ctx, args["repoRef"].(*string), args["prefix"].(string), args["added"].([]string), args["removed"].([]string))
- })
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Mutation_changeLabels_args(rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Mutation().ChangeLabels(ctx, args["repoRef"].(*string), args["prefix"].(string), args["added"].([]string), args["removed"].([]string))
+ })
if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
return graphql.Null
}
res := resTmp.(bug.Snapshot)
+ rctx.Result = res
+
return ec._Bug(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _Mutation_open(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := field.Args["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["prefix"] = arg1
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Mutation"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Mutation_open(ctx, args["repoRef"].(*string), args["prefix"].(string))
- })
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Mutation_open_args(rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Mutation().Open(ctx, args["repoRef"].(*string), args["prefix"].(string))
+ })
if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
return graphql.Null
}
res := resTmp.(bug.Snapshot)
+ rctx.Result = res
+
return ec._Bug(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _Mutation_close(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := field.Args["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["prefix"] = arg1
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Mutation"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Mutation_close(ctx, args["repoRef"].(*string), args["prefix"].(string))
- })
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Mutation_close_args(rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Mutation().Close(ctx, args["repoRef"].(*string), args["prefix"].(string))
+ })
if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
return graphql.Null
}
res := resTmp.(bug.Snapshot)
+ rctx.Result = res
+
return ec._Bug(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _Mutation_setTitle(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := field.Args["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["prefix"] = arg1
- var arg2 string
- if tmp, ok := field.Args["title"]; ok {
- var err error
- arg2, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["title"] = arg2
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Mutation"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Mutation_setTitle(ctx, args["repoRef"].(*string), args["prefix"].(string), args["title"].(string))
- })
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Mutation_setTitle_args(rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Mutation().SetTitle(ctx, args["repoRef"].(*string), args["prefix"].(string), args["title"].(string))
+ })
if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
return graphql.Null
}
res := resTmp.(bug.Snapshot)
+ rctx.Result = res
+
return ec._Bug(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _Mutation_commit(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := field.Args["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["prefix"] = arg1
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Mutation"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Mutation_commit(ctx, args["repoRef"].(*string), args["prefix"].(string))
- })
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Mutation_commit_args(rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Mutation().Commit(ctx, args["repoRef"].(*string), args["prefix"].(string))
+ })
if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
return graphql.Null
}
res := resTmp.(bug.Snapshot)
+ rctx.Result = res
+
return ec._Bug(ctx, field.Selections, &res)
}
var operationConnectionImplementors = []string{"OperationConnection"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _OperationConnection(ctx context.Context, sel []query.Selection, obj *models.OperationConnection) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, operationConnectionImplementors, ec.Variables)
+func (ec *executionContext) _OperationConnection(ctx context.Context, sel ast.SelectionSet, obj *models.OperationConnection) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, operationConnectionImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -1769,89 +3169,198 @@ func (ec *executionContext) _OperationConnection(ctx context.Context, sel []quer
out.Values[i] = graphql.MarshalString("OperationConnection")
case "edges":
out.Values[i] = ec._OperationConnection_edges(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "nodes":
out.Values[i] = ec._OperationConnection_nodes(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "pageInfo":
out.Values[i] = ec._OperationConnection_pageInfo(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "totalCount":
out.Values[i] = ec._OperationConnection_totalCount(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _OperationConnection_edges(ctx context.Context, field graphql.CollectedField, obj *models.OperationConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "OperationConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Edges
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "OperationConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Edges, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]models.OperationEdge)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec._OperationEdge(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec._OperationEdge(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
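
List fields are now rendered concurrently: each element gets its own goroutine coordinated by a sync.WaitGroup, except single-element slices, which run inline. A self-contained sketch of that pattern (strings.ToUpper stands in for the per-element marshaler such as ec._OperationEdge):

package main

import (
	"fmt"
	"strings"
	"sync"
)

// marshalList mirrors the new array rendering: one goroutine per element,
// unless the slice has exactly one element, in which case it is rendered
// inline to skip the WaitGroup overhead.
func marshalList(res []string) []string {
	arr := make([]string, len(res))
	var wg sync.WaitGroup

	isLen1 := len(res) == 1
	if !isLen1 {
		wg.Add(len(res))
	}

	for idx := range res {
		idx := idx // per-iteration copy, as in the generated code (f also receives it by value)
		f := func(idx int) {
			if !isLen1 {
				defer wg.Done()
			}
			arr[idx] = strings.ToUpper(res[idx])
		}
		if isLen1 {
			f(idx) // single element: no goroutine spawned
		} else {
			go f(idx)
		}
	}
	wg.Wait()
	return arr
}

func main() {
	fmt.Println(marshalList([]string{"create", "add comment", "set status"}))
	fmt.Println(marshalList([]string{"create"})) // len 1: rendered inline
}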
+// nolint: vetshadow
func (ec *executionContext) _OperationConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *models.OperationConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "OperationConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Nodes
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "OperationConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Nodes, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]bug.Operation)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec._Operation(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec._Operation(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) _OperationConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *models.OperationConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "OperationConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.PageInfo
+ rctx := &graphql.ResolverContext{
+ Object: "OperationConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.PageInfo, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(models.PageInfo)
+ rctx.Result = res
+
return ec._PageInfo(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _OperationConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *models.OperationConnection) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "OperationConnection"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.TotalCount
+ rctx := &graphql.ResolverContext{
+ Object: "OperationConnection",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.TotalCount, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(int)
+ rctx.Result = res
return graphql.MarshalInt(res)
}
var operationEdgeImplementors = []string{"OperationEdge"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _OperationEdge(ctx context.Context, sel []query.Selection, obj *models.OperationEdge) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, operationEdgeImplementors, ec.Variables)
+func (ec *executionContext) _OperationEdge(ctx context.Context, sel ast.SelectionSet, obj *models.OperationEdge) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, operationEdgeImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -1860,45 +3369,78 @@ func (ec *executionContext) _OperationEdge(ctx context.Context, sel []query.Sele
out.Values[i] = graphql.MarshalString("OperationEdge")
case "cursor":
out.Values[i] = ec._OperationEdge_cursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "node":
out.Values[i] = ec._OperationEdge_node(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _OperationEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *models.OperationEdge) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "OperationEdge"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Cursor
+ rctx := &graphql.ResolverContext{
+ Object: "OperationEdge",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Cursor, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _OperationEdge_node(ctx context.Context, field graphql.CollectedField, obj *models.OperationEdge) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "OperationEdge"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Node
+ rctx := &graphql.ResolverContext{
+ Object: "OperationEdge",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Node, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Operation)
+ rctx.Result = res
+
return ec._Operation(ctx, field.Selections, &res)
}
var pageInfoImplementors = []string{"PageInfo"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _PageInfo(ctx context.Context, sel []query.Selection, obj *models.PageInfo) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, pageInfoImplementors, ec.Variables)
+func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, obj *models.PageInfo) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, pageInfoImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -1907,71 +3449,131 @@ func (ec *executionContext) _PageInfo(ctx context.Context, sel []query.Selection
out.Values[i] = graphql.MarshalString("PageInfo")
case "hasNextPage":
out.Values[i] = ec._PageInfo_hasNextPage(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "hasPreviousPage":
out.Values[i] = ec._PageInfo_hasPreviousPage(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "startCursor":
out.Values[i] = ec._PageInfo_startCursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "endCursor":
out.Values[i] = ec._PageInfo_endCursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field graphql.CollectedField, obj *models.PageInfo) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "PageInfo"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.HasNextPage
+ rctx := &graphql.ResolverContext{
+ Object: "PageInfo",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.HasNextPage, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bool)
+ rctx.Result = res
return graphql.MarshalBoolean(res)
}
+// nolint: vetshadow
func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField, obj *models.PageInfo) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "PageInfo"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.HasPreviousPage
+ rctx := &graphql.ResolverContext{
+ Object: "PageInfo",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.HasPreviousPage, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bool)
+ rctx.Result = res
return graphql.MarshalBoolean(res)
}
+// nolint: vetshadow
func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *models.PageInfo) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "PageInfo"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.StartCursor
+ rctx := &graphql.ResolverContext{
+ Object: "PageInfo",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.StartCursor, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graphql.CollectedField, obj *models.PageInfo) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "PageInfo"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.EndCursor
+ rctx := &graphql.ResolverContext{
+ Object: "PageInfo",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.EndCursor, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
var personImplementors = []string{"Person"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Person(ctx context.Context, sel []query.Selection, obj *bug.Person) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, personImplementors, ec.Variables)
+func (ec *executionContext) _Person(ctx context.Context, sel ast.SelectionSet, obj *bug.Person) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, personImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -1987,42 +3589,63 @@ func (ec *executionContext) _Person(ctx context.Context, sel []query.Selection,
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _Person_email(ctx context.Context, field graphql.CollectedField, obj *bug.Person) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Person"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Email
+ rctx := &graphql.ResolverContext{
+ Object: "Person",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Email, nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _Person_name(ctx context.Context, field graphql.CollectedField, obj *bug.Person) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Person"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Name
+ rctx := &graphql.ResolverContext{
+ Object: "Person",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Name, nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
var queryImplementors = []string{"Query"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Query(ctx context.Context, sel []query.Selection) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, queryImplementors, ec.Variables)
+func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, queryImplementors)
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
Object: "Query",
})
+ var wg sync.WaitGroup
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2030,144 +3653,149 @@ func (ec *executionContext) _Query(ctx context.Context, sel []query.Selection) g
case "__typename":
out.Values[i] = graphql.MarshalString("Query")
case "defaultRepository":
- out.Values[i] = ec._Query_defaultRepository(ctx, field)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._Query_defaultRepository(ctx, field)
+ wg.Done()
+ }(i, field)
case "repository":
- out.Values[i] = ec._Query_repository(ctx, field)
- case "__schema":
- out.Values[i] = ec._Query___schema(ctx, field)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._Query_repository(ctx, field)
+ wg.Done()
+ }(i, field)
case "__type":
out.Values[i] = ec._Query___type(ctx, field)
+ case "__schema":
+ out.Values[i] = ec._Query___schema(ctx, field)
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ wg.Wait()
+ if invalid {
+ return graphql.Null
+ }
return out
}
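
Top-level Query fields backed by resolvers (defaultRepository, repository) are now dispatched in their own goroutines and joined with wg.Wait() before the ordered map is returned, while introspection fields stay inline. A simplified sketch, with a hypothetical resolver signature:

package main

import (
	"fmt"
	"sync"
)

// executeQuery sketches the per-field fan-out of the new _Query marshaler:
// resolver-backed fields run concurrently and write into a preallocated
// slice, so the response order is preserved despite the concurrency.
func executeQuery(fields []string, resolve func(string) string) []string {
	out := make([]string, len(fields))
	var wg sync.WaitGroup
	for i, field := range fields {
		switch field {
		case "__typename": // introspection-style fields stay inline
			out[i] = "Query"
		default:
			wg.Add(1)
			go func(i int, field string) { // pass loop variables by value
				defer wg.Done()
				out[i] = resolve(field)
			}(i, field)
		}
	}
	wg.Wait()
	return out
}

func main() {
	out := executeQuery(
		[]string{"__typename", "defaultRepository", "repository"},
		func(field string) string { return field + " -> resolved" },
	)
	fmt.Println(out)
}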
+// nolint: vetshadow
func (ec *executionContext) _Query_defaultRepository(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "Query",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Query().DefaultRepository(ctx)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*models.Repository)
+ rctx.Result = res
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Query_defaultRepository(ctx)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
- }
- res := resTmp.(*models.Repository)
- if res == nil {
- return graphql.Null
- }
- return ec._Repository(ctx, field.Selections, res)
- })
+ if res == nil {
+ return graphql.Null
+ }
+
+ return ec._Repository(ctx, field.Selections, res)
}
+// nolint: vetshadow
func (ec *executionContext) _Query_repository(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 string
- if tmp, ok := field.Args["id"]; ok {
- var err error
- arg0, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Query_repository_args(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
}
- args["id"] = arg0
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "Query",
Args: args,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Query().Repository(ctx, args["id"].(string))
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Query_repository(ctx, args["id"].(string))
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
- }
- res := resTmp.(*models.Repository)
- if res == nil {
- return graphql.Null
- }
- return ec._Repository(ctx, field.Selections, res)
- })
-}
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*models.Repository)
+ rctx.Result = res
-func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Query"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := ec.introspectSchema()
if res == nil {
return graphql.Null
}
- return ec.___Schema(ctx, field.Selections, res)
+
+ return ec._Repository(ctx, field.Selections, res)
}
+// nolint: vetshadow
func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 string
- if tmp, ok := field.Args["name"]; ok {
- var err error
- arg0, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Query___type_args(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
}
- args["name"] = arg0
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "Query"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := ec.introspectType(args["name"].(string))
+ rctx := &graphql.ResolverContext{
+ Object: "Query",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.introspectType(args["name"].(string)), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*introspection.Type)
+ rctx.Result = res
+
if res == nil {
return graphql.Null
}
+
return ec.___Type(ctx, field.Selections, res)
}
+// nolint: vetshadow
+func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
+ rctx := &graphql.ResolverContext{
+ Object: "Query",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, nil, func(ctx context.Context) (interface{}, error) {
+ return ec.introspectSchema(), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*introspection.Schema)
+ rctx.Result = res
+
+ if res == nil {
+ return graphql.Null
+ }
+
+ return ec.___Schema(ctx, field.Selections, res)
+}
+
var repositoryImplementors = []string{"Repository"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Repository(ctx context.Context, sel []query.Selection, obj *models.Repository) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, repositoryImplementors, ec.Variables)
+func (ec *executionContext) _Repository(ctx context.Context, sel ast.SelectionSet, obj *models.Repository) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, repositoryImplementors)
+ var wg sync.WaitGroup
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2175,174 +3803,99 @@ func (ec *executionContext) _Repository(ctx context.Context, sel []query.Selecti
case "__typename":
out.Values[i] = graphql.MarshalString("Repository")
case "allBugs":
- out.Values[i] = ec._Repository_allBugs(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._Repository_allBugs(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "bug":
- out.Values[i] = ec._Repository_bug(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._Repository_bug(ctx, field, obj)
+ wg.Done()
+ }(i, field)
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ wg.Wait()
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _Repository_allBugs(ctx context.Context, field graphql.CollectedField, obj *models.Repository) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := field.Args["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := field.Args["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := field.Args["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := field.Args["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- }
- args["last"] = arg3
- var arg4 *string
- if tmp, ok := field.Args["query"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg4 = &ptr1
- }
-
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Repository_allBugs_args(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
}
- args["query"] = arg4
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "Repository",
Args: args,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Repository().AllBugs(ctx, obj, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int), args["query"].(*string))
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Repository_allBugs(ctx, obj, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int), args["query"].(*string))
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(models.BugConnection)
- return ec._BugConnection(ctx, field.Selections, &res)
- })
+ return graphql.Null
+ }
+ res := resTmp.(models.BugConnection)
+ rctx.Result = res
+
+ return ec._BugConnection(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _Repository_bug(ctx context.Context, field graphql.CollectedField, obj *models.Repository) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 string
- if tmp, ok := field.Args["prefix"]; ok {
- var err error
- arg0, err = graphql.UnmarshalString(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field_Repository_bug_args(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
}
- args["prefix"] = arg0
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "Repository",
Args: args,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.Repository().Bug(ctx, obj, args["prefix"].(string))
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*bug.Snapshot)
+ rctx.Result = res
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.Repository_bug(ctx, obj, args["prefix"].(string))
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
- }
- res := resTmp.(*bug.Snapshot)
- if res == nil {
- return graphql.Null
- }
- return ec._Bug(ctx, field.Selections, res)
- })
+ if res == nil {
+ return graphql.Null
+ }
+
+ return ec._Bug(ctx, field.Selections, res)
}
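
Nullable and non-null fields now diverge only in the nil branch: Repository.bug (nullable) simply returns graphql.Null, while Repository.allBugs (non-null) records "must not be null" first. A small sketch of that distinction, using a hypothetical error list:

package main

import (
	"errors"
	"fmt"
)

// renderField contrasts the two nil branches: a nil result on a nullable
// field is just null, while a non-null field records "must not be null"
// before returning null (and only if nothing was reported yet).
func renderField(result interface{}, nonNull bool, collected *[]error) string {
	if result == nil {
		if nonNull && len(*collected) == 0 {
			*collected = append(*collected, errors.New("must not be null"))
		}
		return "null"
	}
	return fmt.Sprint(result)
}

func main() {
	var errs []error
	fmt.Println(renderField(nil, false, &errs), len(errs)) // null 0 (nullable)
	fmt.Println(renderField(nil, true, &errs), len(errs))  // null 1 (non-null)
}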
var setStatusOperationImplementors = []string{"SetStatusOperation", "Operation", "Authored"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _SetStatusOperation(ctx context.Context, sel []query.Selection, obj *operations.SetStatusOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, setStatusOperationImplementors, ec.Variables)
+func (ec *executionContext) _SetStatusOperation(ctx context.Context, sel ast.SelectionSet, obj *operations.SetStatusOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, setStatusOperationImplementors)
+ var wg sync.WaitGroup
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2351,96 +3904,114 @@ func (ec *executionContext) _SetStatusOperation(ctx context.Context, sel []query
out.Values[i] = graphql.MarshalString("SetStatusOperation")
case "author":
out.Values[i] = ec._SetStatusOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "date":
- out.Values[i] = ec._SetStatusOperation_date(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._SetStatusOperation_date(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "status":
- out.Values[i] = ec._SetStatusOperation_status(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._SetStatusOperation_status(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ wg.Wait()
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _SetStatusOperation_author(ctx context.Context, field graphql.CollectedField, obj *operations.SetStatusOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "SetStatusOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Author
+ rctx := &graphql.ResolverContext{
+ Object: "SetStatusOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Author, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Person)
+ rctx.Result = res
+
return ec._Person(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _SetStatusOperation_date(ctx context.Context, field graphql.CollectedField, obj *operations.SetStatusOperation) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "SetStatusOperation",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.SetStatusOperation().Date(ctx, obj)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.SetStatusOperation_date(ctx, obj)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(time.Time)
- return graphql.MarshalTime(res)
- })
+ return graphql.Null
+ }
+ res := resTmp.(time.Time)
+ rctx.Result = res
+ return graphql.MarshalTime(res)
}
+// nolint: vetshadow
func (ec *executionContext) _SetStatusOperation_status(ctx context.Context, field graphql.CollectedField, obj *operations.SetStatusOperation) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "SetStatusOperation",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.SetStatusOperation().Status(ctx, obj)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.SetStatusOperation_status(ctx, obj)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(models.Status)
- return res
- })
+ return graphql.Null
+ }
+ res := resTmp.(models.Status)
+ rctx.Result = res
+ return res
}
var setTitleOperationImplementors = []string{"SetTitleOperation", "Operation", "Authored"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _SetTitleOperation(ctx context.Context, sel []query.Selection, obj *operations.SetTitleOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, setTitleOperationImplementors, ec.Variables)
+func (ec *executionContext) _SetTitleOperation(ctx context.Context, sel ast.SelectionSet, obj *operations.SetTitleOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, setTitleOperationImplementors)
+ var wg sync.WaitGroup
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2449,90 +4020,136 @@ func (ec *executionContext) _SetTitleOperation(ctx context.Context, sel []query.
out.Values[i] = graphql.MarshalString("SetTitleOperation")
case "author":
out.Values[i] = ec._SetTitleOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "date":
- out.Values[i] = ec._SetTitleOperation_date(ctx, field, obj)
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ out.Values[i] = ec._SetTitleOperation_date(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ wg.Done()
+ }(i, field)
case "title":
out.Values[i] = ec._SetTitleOperation_title(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "was":
out.Values[i] = ec._SetTitleOperation_was(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ wg.Wait()
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_author(ctx context.Context, field graphql.CollectedField, obj *operations.SetTitleOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "SetTitleOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Author
+ rctx := &graphql.ResolverContext{
+ Object: "SetTitleOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Author, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bug.Person)
+ rctx.Result = res
+
return ec._Person(ctx, field.Selections, &res)
}
+// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_date(ctx context.Context, field graphql.CollectedField, obj *operations.SetTitleOperation) graphql.Marshaler {
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ rctx := &graphql.ResolverContext{
Object: "SetTitleOperation",
Args: nil,
Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return ec.resolvers.SetTitleOperation().Date(ctx, obj)
})
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
-
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.SetTitleOperation_date(ctx, obj)
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
}
- res := resTmp.(time.Time)
- return graphql.MarshalTime(res)
- })
+ return graphql.Null
+ }
+ res := resTmp.(time.Time)
+ rctx.Result = res
+ return graphql.MarshalTime(res)
}
+// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_title(ctx context.Context, field graphql.CollectedField, obj *operations.SetTitleOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "SetTitleOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Title
+ rctx := &graphql.ResolverContext{
+ Object: "SetTitleOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Title, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_was(ctx context.Context, field graphql.CollectedField, obj *operations.SetTitleOperation) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "SetTitleOperation"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Was
+ rctx := &graphql.ResolverContext{
+ Object: "SetTitleOperation",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Was, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
var __DirectiveImplementors = []string{"__Directive"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___Directive(ctx context.Context, sel []query.Selection, obj *introspection.Directive) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, __DirectiveImplementors, ec.Variables)
+func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __DirectiveImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2541,92 +4158,167 @@ func (ec *executionContext) ___Directive(ctx context.Context, sel []query.Select
out.Values[i] = graphql.MarshalString("__Directive")
case "name":
out.Values[i] = ec.___Directive_name(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "description":
out.Values[i] = ec.___Directive_description(ctx, field, obj)
case "locations":
out.Values[i] = ec.___Directive_locations(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "args":
out.Values[i] = ec.___Directive_args(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
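Each object marshaler now carries an invalid flag: whenever a field the schema declares non-null (name, locations and args here) marshals to graphql.Null, the whole __Directive object collapses to null instead of being returned half-filled, which is GraphQL's null-bubbling rule. A condensed sketch of that pattern, with invented names and the same gqlgen graphql package:

package sketch

import "github.com/99designs/gqlgen/graphql"

// marshalObject mirrors the invalid-flag pattern above: if any field declared
// non-null in the schema came back as graphql.Null, the object itself becomes null.
func marshalObject(keys []string, values []graphql.Marshaler, nonNull map[string]bool) graphql.Marshaler {
	out := graphql.NewOrderedMap(len(keys))
	invalid := false
	for i, key := range keys {
		out.Keys[i] = key
		out.Values[i] = values[i]
		if nonNull[key] && values[i] == graphql.Null {
			invalid = true
		}
	}
	if invalid {
		return graphql.Null
	}
	return out
}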
+// nolint: vetshadow
func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Directive"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Name()
+ rctx := &graphql.ResolverContext{
+ Object: "__Directive",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Name, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Directive_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Directive"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Description()
- if res == nil {
+ rctx := &graphql.ResolverContext{
+ Object: "__Directive",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Description, nil
+ })
+ if resTmp == nil {
return graphql.Null
}
- return graphql.MarshalString(*res)
+ res := resTmp.(string)
+ rctx.Result = res
+ return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Directive"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Locations()
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Directive",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Locations, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]string)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
+ arr1[idx1] = func() graphql.Marshaler {
return graphql.MarshalString(res[idx1])
- }())
+ }()
}
+
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Directive"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Args()
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Directive",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Args, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.InputValue)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___InputValue(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___InputValue(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
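List-valued fields are now marshaled concurrently: each element gets its own ResolverContext carrying Index and Result, the per-element work runs on goroutines tracked by a sync.WaitGroup, and a len == 1 fast path keeps single-element lists on the current goroutine. The same fan-out, stripped of the introspection specifics (helper name invented):

package sketch

import (
	"sync"

	"github.com/99designs/gqlgen/graphql"
)

// marshalEach applies marshal to every index in [0, n), in parallel when n > 1,
// and preserves element order by writing into a preallocated graphql.Array.
func marshalEach(n int, marshal func(i int) graphql.Marshaler) graphql.Array {
	arr := make(graphql.Array, n)
	var wg sync.WaitGroup

	isLen1 := n == 1
	if !isLen1 {
		wg.Add(n)
	}

	for idx := 0; idx < n; idx++ {
		idx := idx // capture the loop variable for the goroutine
		work := func(i int) {
			if !isLen1 {
				defer wg.Done()
			}
			arr[i] = marshal(i)
		}
		if isLen1 {
			work(idx)
		} else {
			go work(idx)
		}
	}
	wg.Wait()
	return arr
}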
var __EnumValueImplementors = []string{"__EnumValue"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___EnumValue(ctx context.Context, sel []query.Selection, obj *introspection.EnumValue) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, __EnumValueImplementors, ec.Variables)
+func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __EnumValueImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2635,10 +4327,16 @@ func (ec *executionContext) ___EnumValue(ctx context.Context, sel []query.Select
out.Values[i] = graphql.MarshalString("__EnumValue")
case "name":
out.Values[i] = ec.___EnumValue_name(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "description":
out.Values[i] = ec.___EnumValue_description(ctx, field, obj)
case "isDeprecated":
out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "deprecationReason":
out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj)
default:
@@ -2646,66 +4344,102 @@ func (ec *executionContext) ___EnumValue(ctx context.Context, sel []query.Select
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__EnumValue"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Name()
+ rctx := &graphql.ResolverContext{
+ Object: "__EnumValue",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Name, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__EnumValue"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Description()
- if res == nil {
+ rctx := &graphql.ResolverContext{
+ Object: "__EnumValue",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Description, nil
+ })
+ if resTmp == nil {
return graphql.Null
}
- return graphql.MarshalString(*res)
+ res := resTmp.(string)
+ rctx.Result = res
+ return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__EnumValue"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.IsDeprecated()
+ rctx := &graphql.ResolverContext{
+ Object: "__EnumValue",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.IsDeprecated, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bool)
+ rctx.Result = res
return graphql.MarshalBoolean(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__EnumValue"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.DeprecationReason()
- if res == nil {
+ rctx := &graphql.ResolverContext{
+ Object: "__EnumValue",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.DeprecationReason, nil
+ })
+ if resTmp == nil {
return graphql.Null
}
- return graphql.MarshalString(*res)
+ res := resTmp.(string)
+ rctx.Result = res
+ return graphql.MarshalString(res)
}
var __FieldImplementors = []string{"__Field"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___Field(ctx context.Context, sel []query.Selection, obj *introspection.Field) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, __FieldImplementors, ec.Variables)
+func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __FieldImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2714,14 +4448,26 @@ func (ec *executionContext) ___Field(ctx context.Context, sel []query.Selection,
out.Values[i] = graphql.MarshalString("__Field")
case "name":
out.Values[i] = ec.___Field_name(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "description":
out.Values[i] = ec.___Field_description(ctx, field, obj)
case "args":
out.Values[i] = ec.___Field_args(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "type":
out.Values[i] = ec.___Field_type(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "isDeprecated":
out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "deprecationReason":
out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj)
default:
@@ -2729,97 +4475,187 @@ func (ec *executionContext) ___Field(ctx context.Context, sel []query.Selection,
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Field"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Name()
+ rctx := &graphql.ResolverContext{
+ Object: "__Field",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Name, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Field"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Description()
- if res == nil {
+ rctx := &graphql.ResolverContext{
+ Object: "__Field",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Description, nil
+ })
+ if resTmp == nil {
return graphql.Null
}
- return graphql.MarshalString(*res)
+ res := resTmp.(string)
+ rctx.Result = res
+ return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Field"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Args()
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Field",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Args, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.InputValue)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___InputValue(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___InputValue(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Field"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Type()
- return ec.___Type(ctx, field.Selections, &res)
+ rctx := &graphql.ResolverContext{
+ Object: "__Field",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Type, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(*introspection.Type)
+ rctx.Result = res
+
+ if res == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+
+ return ec.___Type(ctx, field.Selections, res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Field"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.IsDeprecated()
+ rctx := &graphql.ResolverContext{
+ Object: "__Field",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.IsDeprecated, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(bool)
+ rctx.Result = res
return graphql.MarshalBoolean(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Field"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.DeprecationReason()
- if res == nil {
+ rctx := &graphql.ResolverContext{
+ Object: "__Field",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.DeprecationReason, nil
+ })
+ if resTmp == nil {
return graphql.Null
}
- return graphql.MarshalString(*res)
+ res := resTmp.(string)
+ rctx.Result = res
+ return graphql.MarshalString(res)
}
var __InputValueImplementors = []string{"__InputValue"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___InputValue(ctx context.Context, sel []query.Selection, obj *introspection.InputValue) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, __InputValueImplementors, ec.Variables)
+func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __InputValueImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2828,10 +4664,16 @@ func (ec *executionContext) ___InputValue(ctx context.Context, sel []query.Selec
out.Values[i] = graphql.MarshalString("__InputValue")
case "name":
out.Values[i] = ec.___InputValue_name(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "description":
out.Values[i] = ec.___InputValue_description(ctx, field, obj)
case "type":
out.Values[i] = ec.___InputValue_type(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "defaultValue":
out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj)
default:
@@ -2839,53 +4681,100 @@ func (ec *executionContext) ___InputValue(ctx context.Context, sel []query.Selec
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__InputValue"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Name()
+ rctx := &graphql.ResolverContext{
+ Object: "__InputValue",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Name, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___InputValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__InputValue"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Description()
- if res == nil {
+ rctx := &graphql.ResolverContext{
+ Object: "__InputValue",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Description, nil
+ })
+ if resTmp == nil {
return graphql.Null
}
- return graphql.MarshalString(*res)
+ res := resTmp.(string)
+ rctx.Result = res
+ return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__InputValue"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Type()
- return ec.___Type(ctx, field.Selections, &res)
+ rctx := &graphql.ResolverContext{
+ Object: "__InputValue",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Type, nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(*introspection.Type)
+ rctx.Result = res
+
+ if res == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+
+ return ec.___Type(ctx, field.Selections, res)
}
+// nolint: vetshadow
func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__InputValue"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.DefaultValue()
+ rctx := &graphql.ResolverContext{
+ Object: "__InputValue",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.DefaultValue, nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*string)
+ rctx.Result = res
+
if res == nil {
return graphql.Null
}
@@ -2895,10 +4784,11 @@ func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, fiel
var __SchemaImplementors = []string{"__Schema"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___Schema(ctx context.Context, sel []query.Selection, obj *introspection.Schema) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, __SchemaImplementors, ec.Variables)
+func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __SchemaImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -2907,108 +4797,230 @@ func (ec *executionContext) ___Schema(ctx context.Context, sel []query.Selection
out.Values[i] = graphql.MarshalString("__Schema")
case "types":
out.Values[i] = ec.___Schema_types(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "queryType":
out.Values[i] = ec.___Schema_queryType(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "mutationType":
out.Values[i] = ec.___Schema_mutationType(ctx, field, obj)
case "subscriptionType":
out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj)
case "directives":
out.Values[i] = ec.___Schema_directives(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Schema"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Types()
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Schema",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Types(), nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.Type)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___Type(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___Type(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Schema"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.QueryType()
- return ec.___Type(ctx, field.Selections, &res)
+ rctx := &graphql.ResolverContext{
+ Object: "__Schema",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.QueryType(), nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(*introspection.Type)
+ rctx.Result = res
+
+ if res == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+
+ return ec.___Type(ctx, field.Selections, res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Schema"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.MutationType()
+ rctx := &graphql.ResolverContext{
+ Object: "__Schema",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.MutationType(), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*introspection.Type)
+ rctx.Result = res
+
if res == nil {
return graphql.Null
}
+
return ec.___Type(ctx, field.Selections, res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Schema"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.SubscriptionType()
+ rctx := &graphql.ResolverContext{
+ Object: "__Schema",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.SubscriptionType(), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*introspection.Type)
+ rctx.Result = res
+
if res == nil {
return graphql.Null
}
+
return ec.___Type(ctx, field.Selections, res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Schema_directives(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Schema"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Directives()
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Schema",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Directives(), nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.Directive)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___Directive(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___Directive(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
var __TypeImplementors = []string{"__Type"}
// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___Type(ctx context.Context, sel []query.Selection, obj *introspection.Type) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, __TypeImplementors, ec.Variables)
+func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __TypeImplementors)
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -3017,6 +5029,9 @@ func (ec *executionContext) ___Type(ctx context.Context, sel []query.Selection,
out.Values[i] = graphql.MarshalString("__Type")
case "kind":
out.Values[i] = ec.___Type_kind(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
case "name":
out.Values[i] = ec.___Type_name(ctx, field, obj)
case "description":
@@ -3038,185 +5053,373 @@ func (ec *executionContext) ___Type(ctx context.Context, sel []query.Selection,
}
}
+ if invalid {
+ return graphql.Null
+ }
return out
}
+// nolint: vetshadow
func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Kind()
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Kind(), nil
+ })
+ if resTmp == nil {
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ res := resTmp.(string)
+ rctx.Result = res
return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Name()
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Name(), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*string)
+ rctx.Result = res
+
if res == nil {
return graphql.Null
}
return graphql.MarshalString(*res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Type_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Description()
- if res == nil {
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Description(), nil
+ })
+ if resTmp == nil {
return graphql.Null
}
- return graphql.MarshalString(*res)
+ res := resTmp.(string)
+ rctx.Result = res
+ return graphql.MarshalString(res)
}
+// nolint: vetshadow
func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 bool
- if tmp, ok := field.Args["includeDeprecated"]; ok {
- var err error
- arg0, err = graphql.UnmarshalBoolean(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field___Type_fields_args(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
}
- args["includeDeprecated"] = arg0
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Fields(args["includeDeprecated"].(bool))
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Fields(args["includeDeprecated"].(bool)), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.Field)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___Field(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___Field(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
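Argument decoding has also moved out of the resolvers: field.Args is gone in favour of field.ArgumentMap(ec.Variables) plus a generated per-field helper (field___Type_fields_args) that this hunk only calls. Judging by the inline code it replaces, that helper presumably looks roughly like the reconstruction below, which may differ in detail from the real generated function.

// Reconstructed from the inline argument handling removed above; not copied
// from the generated file, so details may differ.
func field___Type_fields_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
	args := map[string]interface{}{}
	var arg0 bool
	if tmp, ok := rawArgs["includeDeprecated"]; ok {
		var err error
		arg0, err = graphql.UnmarshalBoolean(tmp)
		if err != nil {
			return nil, err
		}
	}
	args["includeDeprecated"] = arg0
	return args, nil
}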
+// nolint: vetshadow
func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.Interfaces()
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.Interfaces(), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.Type)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___Type(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___Type(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.PossibleTypes()
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.PossibleTypes(), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.Type)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___Type(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___Type(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- args := map[string]interface{}{}
- var arg0 bool
- if tmp, ok := field.Args["includeDeprecated"]; ok {
- var err error
- arg0, err = graphql.UnmarshalBoolean(tmp)
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := field___Type_enumValues_args(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
}
- args["includeDeprecated"] = arg0
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = args
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.EnumValues(args["includeDeprecated"].(bool))
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: args,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.EnumValues(args["includeDeprecated"].(bool)), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.EnumValue)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___EnumValue(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___EnumValue(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.InputFields()
- arr1 := graphql.Array{}
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.InputFields(), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.([]introspection.InputValue)
+ rctx.Result = res
+
+ arr1 := make(graphql.Array, len(res))
+ var wg sync.WaitGroup
+
+ isLen1 := len(res) == 1
+ if !isLen1 {
+ wg.Add(len(res))
+ }
+
for idx1 := range res {
- arr1 = append(arr1, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex(idx1)
- defer rctx.Pop()
- return ec.___InputValue(ctx, field.Selections, &res[idx1])
- }())
+ idx1 := idx1
+ rctx := &graphql.ResolverContext{
+ Index: &idx1,
+ Result: &res[idx1],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(idx1 int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ arr1[idx1] = func() graphql.Marshaler {
+
+ return ec.___InputValue(ctx, field.Selections, &res[idx1])
+ }()
+ }
+ if isLen1 {
+ f(idx1)
+ } else {
+ go f(idx1)
+ }
+
}
+ wg.Wait()
return arr1
}
+// nolint: vetshadow
func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = "__Type"
- rctx.Args = nil
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- res := obj.OfType()
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Args: nil,
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, obj, func(ctx context.Context) (interface{}, error) {
+ return obj.OfType(), nil
+ })
+ if resTmp == nil {
+ return graphql.Null
+ }
+ res := resTmp.(*introspection.Type)
+ rctx.Result = res
+
if res == nil {
return graphql.Null
}
+
return ec.___Type(ctx, field.Selections, res)
}
-func (ec *executionContext) _Authored(ctx context.Context, sel []query.Selection, obj *models.Authored) graphql.Marshaler {
+func (ec *executionContext) _Authored(ctx context.Context, sel ast.SelectionSet, obj *models.Authored) graphql.Marshaler {
switch obj := (*obj).(type) {
case nil:
return graphql.Null
@@ -3249,7 +5452,7 @@ func (ec *executionContext) _Authored(ctx context.Context, sel []query.Selection
}
}
-func (ec *executionContext) _Operation(ctx context.Context, sel []query.Selection, obj *bug.Operation) graphql.Marshaler {
+func (ec *executionContext) _Operation(ctx context.Context, sel ast.SelectionSet, obj *bug.Operation) graphql.Marshaler {
switch obj := (*obj).(type) {
case nil:
return graphql.Null
@@ -3278,40 +5481,52 @@ func (ec *executionContext) _Operation(ctx context.Context, sel []query.Selectio
}
}
+func (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}, next graphql.Resolver) (ret interface{}) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ res, err := ec.ResolverMiddleware(ctx, next)
+ if err != nil {
+ ec.Error(ctx, err)
+ return nil
+ }
+ return res
+}
+
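FieldMiddleware, added at the end of the generated file, is the single funnel every field resolution above goes through: it converts resolver panics into GraphQL errors and delegates to ec.ResolverMiddleware, the middleware chain configured on the server. A sketch of a middleware with the signature that chain expects; the handler.ResolverMiddleware option used to install it is an assumption about this gqlgen release.

package sketch

import (
	"context"
	"log"
	"time"

	"github.com/99designs/gqlgen/graphql"
)

// timing logs how long each field resolver takes. It would typically be passed
// to the server as handler.ResolverMiddleware(timing) (option name assumed).
func timing(ctx context.Context, next graphql.Resolver) (interface{}, error) {
	start := time.Now()
	res, err := next(ctx)

	rctx := graphql.GetResolverContext(ctx)
	log.Printf("%s.%s resolved in %s", rctx.Object, rctx.Field.Name, time.Since(start))
	return res, err
}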
func (ec *executionContext) introspectSchema() *introspection.Schema {
return introspection.WrapSchema(parsedSchema)
}
func (ec *executionContext) introspectType(name string) *introspection.Type {
- t := parsedSchema.Resolve(name)
- if t == nil {
- return nil
- }
- return introspection.WrapType(t)
+ return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name])
}
-var parsedSchema = schema.MustParse(`scalar Time
+var parsedSchema = gqlparser.MustLoadSchema(
+ &ast.Source{Name: "schema.graphql", Input: `scalar Time
scalar Label
scalar Hash
-# Information about pagination in a connection.
+"""Information about pagination in a connection."""
type PageInfo {
- # When paginating forwards, are there more items?
+ """When paginating forwards, are there more items?"""
hasNextPage: Boolean!
- # When paginating backwards, are there more items?
+ """When paginating backwards, are there more items?"""
hasPreviousPage: Boolean!
- # When paginating backwards, the cursor to continue.
+ """When paginating backwards, the cursor to continue."""
startCursor: String!
- # When paginating forwards, the cursor to continue.
+ """When paginating forwards, the cursor to continue."""
endCursor: String!
}
-# Represents an person in a git object.
+"""Represents an person in a git object."""
type Person {
- # The email of the person.
+ """The email of the person."""
email: String
- # The name of the person.
+ """The name of the person."""
name: String
}
@@ -3327,15 +5542,15 @@ type CommentEdge {
node: Comment!
}
-# Represents a comment on a bug.
+"""Represents a comment on a bug."""
type Comment implements Authored {
- # The author of this comment.
+ """The author of this comment."""
author: Person!
- # The message of this comment.
+ """The message of this comment."""
message: String!
- # All media's hash referenced in this comment
+ """All media's hash referenced in this comment"""
files: [Hash!]!
}
@@ -3344,9 +5559,9 @@ enum Status {
CLOSED
}
-# An object that has an author.
+"""An object that has an author."""
interface Authored {
- # The author of this object.
+ """The author of this object."""
author: Person!
}
@@ -3362,15 +5577,15 @@ type OperationEdge {
node: Operation!
}
-# An operation applied to a bug.
+"""An operation applied to a bug."""
interface Operation {
- # The operations author.
+ """The operations author."""
author: Person!
- # The datetime when this operation was issued.
+ """The datetime when this operation was issued."""
date: Time!
}
-type CreateOperation implements Operation, Authored {
+type CreateOperation implements Operation & Authored {
author: Person!
date: Time!
@@ -3379,7 +5594,7 @@ type CreateOperation implements Operation, Authored {
files: [Hash!]!
}
-type SetTitleOperation implements Operation, Authored {
+type SetTitleOperation implements Operation & Authored {
author: Person!
date: Time!
@@ -3387,7 +5602,7 @@ type SetTitleOperation implements Operation, Authored {
was: String!
}
-type AddCommentOperation implements Operation, Authored {
+type AddCommentOperation implements Operation & Authored {
author: Person!
date: Time!
@@ -3395,14 +5610,14 @@ type AddCommentOperation implements Operation, Authored {
files: [Hash!]!
}
-type SetStatusOperation implements Operation, Authored {
+type SetStatusOperation implements Operation & Authored {
author: Person!
date: Time!
status: Status!
}
-type LabelChangeOperation implements Operation, Authored {
+type LabelChangeOperation implements Operation & Authored {
author: Person!
date: Time!
@@ -3410,22 +5625,22 @@ type LabelChangeOperation implements Operation, Authored {
removed: [Label!]!
}
-# The connection type for Bug.
+"""The connection type for Bug."""
type BugConnection {
- # A list of edges.
+ """A list of edges."""
edges: [BugEdge!]!
nodes: [Bug!]!
- # Information to aid in pagination.
+ """Information to aid in pagination."""
pageInfo: PageInfo!
- # Identifies the total count of items in the connection.
+ """Identifies the total count of items in the connection."""
totalCount: Int!
}
-# An edge in a connection.
+"""An edge in a connection."""
type BugEdge {
- # A cursor for use in pagination.
+ """A cursor for use in pagination."""
cursor: String!
- # The item at the end of the edge.
+ """The item at the end of the edge."""
node: Bug!
}
@@ -3440,39 +5655,39 @@ type Bug {
lastEdit: Time!
comments(
- # Returns the elements in the list that come after the specified cursor.
+ """Returns the elements in the list that come after the specified cursor."""
after: String
- # Returns the elements in the list that come before the specified cursor.
+ """Returns the elements in the list that come before the specified cursor."""
before: String
- # Returns the first _n_ elements from the list.
+ """Returns the first _n_ elements from the list."""
first: Int
- # Returns the last _n_ elements from the list.
+ """Returns the last _n_ elements from the list."""
last: Int
): CommentConnection!
operations(
- # Returns the elements in the list that come after the specified cursor.
+ """Returns the elements in the list that come after the specified cursor."""
after: String
- # Returns the elements in the list that come before the specified cursor.
+ """Returns the elements in the list that come before the specified cursor."""
before: String
- # Returns the first _n_ elements from the list.
+ """Returns the first _n_ elements from the list."""
first: Int
- # Returns the last _n_ elements from the list.
+ """Returns the last _n_ elements from the list."""
last: Int
): OperationConnection!
}
type Repository {
allBugs(
- # Returns the elements in the list that come after the specified cursor.
+ """Returns the elements in the list that come after the specified cursor."""
after: String
- # Returns the elements in the list that come before the specified cursor.
+ """Returns the elements in the list that come before the specified cursor."""
before: String
- # Returns the first _n_ elements from the list.
+ """Returns the first _n_ elements from the list."""
first: Int
- # Returns the last _n_ elements from the list.
+ """Returns the last _n_ elements from the list."""
last: Int
- # A query to select and order bugs
+ """A query to select and order bugs"""
query: String
): BugConnection!
bug(prefix: String!): Bug
@@ -3494,4 +5709,5 @@ type Mutation {
commit(repoRef: String, prefix: String!): Bug!
}
-`)
+`},
+)
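The schema source is now fed to gqlparser as an ast.Source instead of going through the old schema.MustParse, and introspectType resolves names straight from parsedSchema.Types via introspection.WrapTypeFromDef. A small example of using the same API directly; the schema text and names below are invented for illustration.

package sketch

import (
	"fmt"

	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
)

// example loads an SDL document the same way the generated code does;
// MustLoadSchema panics on invalid SDL, acceptable for an embedded schema.
func example() {
	schema := gqlparser.MustLoadSchema(&ast.Source{
		Name: "example.graphql",
		Input: `
			"""A minimal example type."""
			type Query {
				ping: String!
			}
		`,
	})

	if def, ok := schema.Types["Query"]; ok {
		fmt.Println(def.Kind, def.Name) // OBJECT Query
	}
}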
diff --git a/graphql/handler.go b/graphql/handler.go
index 8448d1d2..7b940d8f 100644
--- a/graphql/handler.go
+++ b/graphql/handler.go
@@ -3,10 +3,10 @@
package graphql
import (
+ "github.com/99designs/gqlgen/handler"
"github.com/MichaelMure/git-bug/graphql/graph"
"github.com/MichaelMure/git-bug/graphql/resolvers"
"github.com/MichaelMure/git-bug/repository"
- "github.com/vektah/gqlgen/handler"
"net/http"
)
@@ -25,7 +25,11 @@ func NewHandler(repo repository.Repo) (Handler, error) {
return Handler{}, err
}
- h.HandlerFunc = handler.GraphQL(graph.NewExecutableSchema(h.RootResolver))
+ config := graph.Config{
+ Resolvers: h.RootResolver,
+ }
+
+ h.HandlerFunc = handler.GraphQL(graph.NewExecutableSchema(config))
return h, nil
}
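NewExecutableSchema now takes a graph.Config value rather than the bare resolver root, which is what lets later options ride along without changing this call site again. A hedged sketch of serving the resulting handler; handler.Playground and the listen address are assumptions for illustration.

package sketch

import (
	"net/http"

	"github.com/99designs/gqlgen/handler"
	"github.com/MichaelMure/git-bug/graphql"
	"github.com/MichaelMure/git-bug/repository"
)

// serve mounts the git-bug GraphQL endpoint plus a playground UI.
func serve(repo repository.Repo) error {
	h, err := graphql.NewHandler(repo)
	if err != nil {
		return err
	}
	// Handler keeps the http.HandlerFunc built in the hunk above.
	http.Handle("/graphql", h.HandlerFunc)
	// handler.Playground is assumed to exist in this gqlgen version.
	http.Handle("/", handler.Playground("git-bug", "/graphql"))
	return http.ListenAndServe("127.0.0.1:3233", nil)
}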
diff --git a/graphql/models/gen_models.go b/graphql/models/gen_models.go
index 1ed46029..23a25814 100644
--- a/graphql/models/gen_models.go
+++ b/graphql/models/gen_models.go
@@ -1,4 +1,4 @@
-// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.
+// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
package models
@@ -10,37 +10,48 @@ import (
bug "github.com/MichaelMure/git-bug/bug"
)
+// An object that has an author.
type Authored interface{}
+
+// The connection type for Bug.
type BugConnection struct {
Edges []BugEdge `json:"edges"`
Nodes []bug.Snapshot `json:"nodes"`
PageInfo PageInfo `json:"pageInfo"`
TotalCount int `json:"totalCount"`
}
+
+// An edge in a connection.
type BugEdge struct {
Cursor string `json:"cursor"`
Node bug.Snapshot `json:"node"`
}
+
type CommentConnection struct {
Edges []CommentEdge `json:"edges"`
Nodes []bug.Comment `json:"nodes"`
PageInfo PageInfo `json:"pageInfo"`
TotalCount int `json:"totalCount"`
}
+
type CommentEdge struct {
Cursor string `json:"cursor"`
Node bug.Comment `json:"node"`
}
+
type OperationConnection struct {
Edges []OperationEdge `json:"edges"`
Nodes []bug.Operation `json:"nodes"`
PageInfo PageInfo `json:"pageInfo"`
TotalCount int `json:"totalCount"`
}
+
type OperationEdge struct {
Cursor string `json:"cursor"`
Node bug.Operation `json:"node"`
}
+
+// Information about pagination in a connection.
type PageInfo struct {
HasNextPage bool `json:"hasNextPage"`
HasPreviousPage bool `json:"hasPreviousPage"`
diff --git a/graphql/schema.graphql b/graphql/schema.graphql
index 779331c4..733b9f1a 100644
--- a/graphql/schema.graphql
+++ b/graphql/schema.graphql
@@ -2,24 +2,24 @@ scalar Time
scalar Label
scalar Hash
-# Information about pagination in a connection.
+"""Information about pagination in a connection."""
type PageInfo {
- # When paginating forwards, are there more items?
+ """When paginating forwards, are there more items?"""
hasNextPage: Boolean!
- # When paginating backwards, are there more items?
+ """When paginating backwards, are there more items?"""
hasPreviousPage: Boolean!
- # When paginating backwards, the cursor to continue.
+ """When paginating backwards, the cursor to continue."""
startCursor: String!
- # When paginating forwards, the cursor to continue.
+ """When paginating forwards, the cursor to continue."""
endCursor: String!
}
-# Represents an person in a git object.
+"""Represents an person in a git object."""
type Person {
- # The email of the person.
+ """The email of the person."""
email: String
- # The name of the person.
+ """The name of the person."""
name: String
}
@@ -35,15 +35,15 @@ type CommentEdge {
node: Comment!
}
-# Represents a comment on a bug.
+"""Represents a comment on a bug."""
type Comment implements Authored {
- # The author of this comment.
+ """The author of this comment."""
author: Person!
- # The message of this comment.
+ """The message of this comment."""
message: String!
- # All media's hash referenced in this comment
+ """All media's hash referenced in this comment"""
files: [Hash!]!
}
@@ -52,9 +52,9 @@ enum Status {
CLOSED
}
-# An object that has an author.
+"""An object that has an author."""
interface Authored {
- # The author of this object.
+ """The author of this object."""
author: Person!
}
@@ -70,15 +70,15 @@ type OperationEdge {
node: Operation!
}
-# An operation applied to a bug.
+"""An operation applied to a bug."""
interface Operation {
- # The operations author.
+ """The operations author."""
author: Person!
- # The datetime when this operation was issued.
+ """The datetime when this operation was issued."""
date: Time!
}
-type CreateOperation implements Operation, Authored {
+type CreateOperation implements Operation & Authored {
author: Person!
date: Time!
@@ -87,7 +87,7 @@ type CreateOperation implements Operation, Authored {
files: [Hash!]!
}
-type SetTitleOperation implements Operation, Authored {
+type SetTitleOperation implements Operation & Authored {
author: Person!
date: Time!
@@ -95,7 +95,7 @@ type SetTitleOperation implements Operation, Authored {
was: String!
}
-type AddCommentOperation implements Operation, Authored {
+type AddCommentOperation implements Operation & Authored {
author: Person!
date: Time!
@@ -103,14 +103,14 @@ type AddCommentOperation implements Operation, Authored {
files: [Hash!]!
}
-type SetStatusOperation implements Operation, Authored {
+type SetStatusOperation implements Operation & Authored {
author: Person!
date: Time!
status: Status!
}
-type LabelChangeOperation implements Operation, Authored {
+type LabelChangeOperation implements Operation & Authored {
author: Person!
date: Time!
@@ -118,22 +118,22 @@ type LabelChangeOperation implements Operation, Authored {
removed: [Label!]!
}
-# The connection type for Bug.
+"""The connection type for Bug."""
type BugConnection {
- # A list of edges.
+ """A list of edges."""
edges: [BugEdge!]!
nodes: [Bug!]!
- # Information to aid in pagination.
+ """Information to aid in pagination."""
pageInfo: PageInfo!
- # Identifies the total count of items in the connection.
+ """Identifies the total count of items in the connection."""
totalCount: Int!
}
-# An edge in a connection.
+"""An edge in a connection."""
type BugEdge {
- # A cursor for use in pagination.
+ """A cursor for use in pagination."""
cursor: String!
- # The item at the end of the edge.
+ """The item at the end of the edge."""
node: Bug!
}
@@ -148,39 +148,39 @@ type Bug {
lastEdit: Time!
comments(
- # Returns the elements in the list that come after the specified cursor.
+ """Returns the elements in the list that come after the specified cursor."""
after: String
- # Returns the elements in the list that come before the specified cursor.
+ """Returns the elements in the list that come before the specified cursor."""
before: String
- # Returns the first _n_ elements from the list.
+ """Returns the first _n_ elements from the list."""
first: Int
- # Returns the last _n_ elements from the list.
+ """Returns the last _n_ elements from the list."""
last: Int
): CommentConnection!
operations(
- # Returns the elements in the list that come after the specified cursor.
+ """Returns the elements in the list that come after the specified cursor."""
after: String
- # Returns the elements in the list that come before the specified cursor.
+ """Returns the elements in the list that come before the specified cursor."""
before: String
- # Returns the first _n_ elements from the list.
+ """Returns the first _n_ elements from the list."""
first: Int
- # Returns the last _n_ elements from the list.
+ """Returns the last _n_ elements from the list."""
last: Int
): OperationConnection!
}
type Repository {
allBugs(
- # Returns the elements in the list that come after the specified cursor.
+ """Returns the elements in the list that come after the specified cursor."""
after: String
- # Returns the elements in the list that come before the specified cursor.
+ """Returns the elements in the list that come before the specified cursor."""
before: String
- # Returns the first _n_ elements from the list.
+ """Returns the first _n_ elements from the list."""
first: Int
- # Returns the last _n_ elements from the list.
+ """Returns the last _n_ elements from the list."""
last: Int
- # A query to select and order bugs
+ """A query to select and order bugs"""
query: String
): BugConnection!
bug(prefix: String!): Bug
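
Note: the schema hunk above switches from # comments to triple-quoted block descriptions and from comma-separated interface lists (implements Operation, Authored) to the ampersand form (implements Operation & Authored) used by the June 2018 GraphQL SDL that gqlparser targets. The following is a minimal sketch, not part of the patch, of parsing that style of schema with the newly vendored gqlparser; the small SDL document is invented for illustration.

package main

import (
	"fmt"

	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
)

func main() {
	// Invented schema using the post-upgrade SDL style: block descriptions
	// and "&" between implemented interfaces.
	const sdl = `
"""An operation applied to a bug."""
interface Operation {
	"""The operation's author."""
	author: String!
}

interface Authored {
	author: String!
}

type CreateOperation implements Operation & Authored {
	author: String!
}

type Query {
	noop: String
}
`
	schema, err := gqlparser.LoadSchema(&ast.Source{Name: "example.graphql", Input: sdl})
	if err != nil {
		panic(err)
	}
	fmt.Println(schema.Types["Operation"].Description)
}
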
diff --git a/vendor/github.com/vektah/gqlgen/LICENSE b/vendor/github.com/99designs/gqlgen/LICENSE
index 18e1b249..18e1b249 100644
--- a/vendor/github.com/vektah/gqlgen/LICENSE
+++ b/vendor/github.com/99designs/gqlgen/LICENSE
diff --git a/vendor/github.com/99designs/gqlgen/codegen/build.go b/vendor/github.com/99designs/gqlgen/codegen/build.go
new file mode 100644
index 00000000..42dedbf8
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/build.go
@@ -0,0 +1,214 @@
+package codegen
+
+import (
+ "fmt"
+ "go/build"
+ "go/types"
+ "os"
+
+ "github.com/pkg/errors"
+ "golang.org/x/tools/go/loader"
+)
+
+type Build struct {
+ PackageName string
+ Objects Objects
+ Inputs Objects
+ Interfaces []*Interface
+ Imports []*Import
+ QueryRoot *Object
+ MutationRoot *Object
+ SubscriptionRoot *Object
+ SchemaRaw string
+ SchemaFilename string
+ Directives []*Directive
+}
+
+type ModelBuild struct {
+ PackageName string
+ Imports []*Import
+ Models []Model
+ Enums []Enum
+}
+
+type ResolverBuild struct {
+ PackageName string
+ Imports []*Import
+ ResolverType string
+ Objects Objects
+ ResolverFound bool
+}
+
+type ServerBuild struct {
+ PackageName string
+ Imports []*Import
+ ExecPackageName string
+ ResolverPackageName string
+}
+
+// Create a list of models that need to be generated
+func (cfg *Config) models() (*ModelBuild, error) {
+ namedTypes := cfg.buildNamedTypes()
+
+ progLoader := newLoader(namedTypes, true)
+ prog, err := progLoader.Load()
+ if err != nil {
+ return nil, errors.Wrap(err, "loading failed")
+ }
+ imports := buildImports(namedTypes, cfg.Model.Dir())
+
+ cfg.bindTypes(imports, namedTypes, cfg.Model.Dir(), prog)
+
+ models, err := cfg.buildModels(namedTypes, prog, imports)
+ if err != nil {
+ return nil, err
+ }
+ return &ModelBuild{
+ PackageName: cfg.Model.Package,
+ Models: models,
+ Enums: cfg.buildEnums(namedTypes),
+ Imports: imports.finalize(),
+ }, nil
+}
+
+// bind a schema together with some code to generate a ResolverBuild
+func (cfg *Config) resolver() (*ResolverBuild, error) {
+ progLoader := newLoader(cfg.buildNamedTypes(), true)
+ progLoader.Import(cfg.Resolver.ImportPath())
+
+ prog, err := progLoader.Load()
+ if err != nil {
+ return nil, err
+ }
+
+ destDir := cfg.Resolver.Dir()
+
+ namedTypes := cfg.buildNamedTypes()
+ imports := buildImports(namedTypes, destDir)
+ imports.add(cfg.Exec.ImportPath())
+ imports.add("github.com/99designs/gqlgen/handler") // avoid import github.com/vektah/gqlgen/handler
+
+ cfg.bindTypes(imports, namedTypes, destDir, prog)
+
+ objects, err := cfg.buildObjects(namedTypes, prog, imports)
+ if err != nil {
+ return nil, err
+ }
+
+ def, _ := findGoType(prog, cfg.Resolver.ImportPath(), cfg.Resolver.Type)
+ resolverFound := def != nil
+
+ return &ResolverBuild{
+ PackageName: cfg.Resolver.Package,
+ Imports: imports.finalize(),
+ Objects: objects,
+ ResolverType: cfg.Resolver.Type,
+ ResolverFound: resolverFound,
+ }, nil
+}
+
+func (cfg *Config) server(destDir string) *ServerBuild {
+ imports := buildImports(NamedTypes{}, destDir)
+ imports.add(cfg.Exec.ImportPath())
+ imports.add(cfg.Resolver.ImportPath())
+
+ return &ServerBuild{
+ PackageName: cfg.Resolver.Package,
+ Imports: imports.finalize(),
+ ExecPackageName: cfg.Exec.Package,
+ ResolverPackageName: cfg.Resolver.Package,
+ }
+}
+
+// bind a schema together with some code to generate a Build
+func (cfg *Config) bind() (*Build, error) {
+ namedTypes := cfg.buildNamedTypes()
+
+ progLoader := newLoader(namedTypes, true)
+ prog, err := progLoader.Load()
+ if err != nil {
+ return nil, errors.Wrap(err, "loading failed")
+ }
+
+ imports := buildImports(namedTypes, cfg.Exec.Dir())
+ cfg.bindTypes(imports, namedTypes, cfg.Exec.Dir(), prog)
+
+ objects, err := cfg.buildObjects(namedTypes, prog, imports)
+ if err != nil {
+ return nil, err
+ }
+
+ inputs, err := cfg.buildInputs(namedTypes, prog, imports)
+ if err != nil {
+ return nil, err
+ }
+ directives, err := cfg.buildDirectives(namedTypes)
+ if err != nil {
+ return nil, err
+ }
+
+ b := &Build{
+ PackageName: cfg.Exec.Package,
+ Objects: objects,
+ Interfaces: cfg.buildInterfaces(namedTypes, prog),
+ Inputs: inputs,
+ Imports: imports.finalize(),
+ SchemaRaw: cfg.SchemaStr,
+ SchemaFilename: cfg.SchemaFilename,
+ Directives: directives,
+ }
+
+ if cfg.schema.Query != nil {
+ b.QueryRoot = b.Objects.ByName(cfg.schema.Query.Name)
+ } else {
+ return b, fmt.Errorf("query entry point missing")
+ }
+
+ if cfg.schema.Mutation != nil {
+ b.MutationRoot = b.Objects.ByName(cfg.schema.Mutation.Name)
+ }
+
+ if cfg.schema.Subscription != nil {
+ b.SubscriptionRoot = b.Objects.ByName(cfg.schema.Subscription.Name)
+ }
+ return b, nil
+}
+
+func (cfg *Config) validate() error {
+ progLoader := newLoader(cfg.buildNamedTypes(), false)
+ _, err := progLoader.Load()
+ return err
+}
+
+func newLoader(namedTypes NamedTypes, allowErrors bool) loader.Config {
+ conf := loader.Config{}
+ if allowErrors {
+ conf = loader.Config{
+ AllowErrors: true,
+ TypeChecker: types.Config{
+ Error: func(e error) {},
+ },
+ }
+ }
+ for _, imp := range ambientImports {
+ conf.Import(imp)
+ }
+
+ for _, imp := range namedTypes {
+ if imp.Package != "" {
+ conf.Import(imp.Package)
+ }
+ }
+ return conf
+}
+
+func resolvePkg(pkgName string) (string, error) {
+ cwd, _ := os.Getwd()
+
+ pkg, err := build.Default.Import(pkgName, cwd, build.FindOnly)
+ if err != nil {
+ return "", err
+ }
+
+ return pkg.ImportPath, nil
+}
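
Note: newLoader above builds a golang.org/x/tools/go/loader.Config that tolerates type errors, so packages that reference code which has not been generated yet can still be inspected. A standalone sketch of the same pattern; the "fmt" import is only a stand-in for the ambient and named-type imports newLoader feeds in.

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/loader"
)

func main() {
	// Same pattern as newLoader: tolerate type errors and silence the
	// checker so partially generated packages still load.
	conf := loader.Config{
		AllowErrors: true,
		TypeChecker: types.Config{
			Error: func(e error) {},
		},
	}

	// Stand-in for the ambient and named-type imports.
	conf.Import("fmt")

	prog, err := conf.Load()
	if err != nil {
		panic(err)
	}
	fmt.Println(len(prog.AllPackages), "packages type-checked")
}
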
diff --git a/vendor/github.com/99designs/gqlgen/codegen/codegen.go b/vendor/github.com/99designs/gqlgen/codegen/codegen.go
new file mode 100644
index 00000000..27873400
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/codegen.go
@@ -0,0 +1,174 @@
+package codegen
+
+import (
+ "log"
+ "os"
+ "path/filepath"
+ "regexp"
+ "syscall"
+
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/pkg/errors"
+ "github.com/vektah/gqlparser"
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+func Generate(cfg Config) error {
+ if err := cfg.normalize(); err != nil {
+ return err
+ }
+
+ _ = syscall.Unlink(cfg.Exec.Filename)
+ _ = syscall.Unlink(cfg.Model.Filename)
+
+ modelsBuild, err := cfg.models()
+ if err != nil {
+ return errors.Wrap(err, "model plan failed")
+ }
+ if len(modelsBuild.Models) > 0 || len(modelsBuild.Enums) > 0 {
+ if err = templates.RenderToFile("models.gotpl", cfg.Model.Filename, modelsBuild); err != nil {
+ return err
+ }
+
+ for _, model := range modelsBuild.Models {
+ modelCfg := cfg.Models[model.GQLType]
+ modelCfg.Model = cfg.Model.ImportPath() + "." + model.GoType
+ cfg.Models[model.GQLType] = modelCfg
+ }
+
+ for _, enum := range modelsBuild.Enums {
+ modelCfg := cfg.Models[enum.GQLType]
+ modelCfg.Model = cfg.Model.ImportPath() + "." + enum.GoType
+ cfg.Models[enum.GQLType] = modelCfg
+ }
+ }
+
+ build, err := cfg.bind()
+ if err != nil {
+ return errors.Wrap(err, "exec plan failed")
+ }
+
+ if err := templates.RenderToFile("generated.gotpl", cfg.Exec.Filename, build); err != nil {
+ return err
+ }
+
+ if cfg.Resolver.IsDefined() {
+ if err := generateResolver(cfg); err != nil {
+ return errors.Wrap(err, "generating resolver failed")
+ }
+ }
+
+ if err := cfg.validate(); err != nil {
+ return errors.Wrap(err, "validation failed")
+ }
+
+ return nil
+}
+
+func GenerateServer(cfg Config, filename string) error {
+ if err := cfg.Exec.normalize(); err != nil {
+ return errors.Wrap(err, "exec")
+ }
+ if err := cfg.Resolver.normalize(); err != nil {
+ return errors.Wrap(err, "resolver")
+ }
+
+ serverFilename := abs(filename)
+ serverBuild := cfg.server(filepath.Dir(serverFilename))
+
+ if _, err := os.Stat(serverFilename); os.IsNotExist(errors.Cause(err)) {
+ err = templates.RenderToFile("server.gotpl", serverFilename, serverBuild)
+ if err != nil {
+ return errors.Wrap(err, "generate server failed")
+ }
+ } else {
+ log.Printf("Skipped server: %s already exists\n", serverFilename)
+ }
+ return nil
+}
+
+func generateResolver(cfg Config) error {
+ resolverBuild, err := cfg.resolver()
+ if err != nil {
+ return errors.Wrap(err, "resolver build failed")
+ }
+ filename := cfg.Resolver.Filename
+
+ if resolverBuild.ResolverFound {
+ log.Printf("Skipped resolver: %s.%s already exists\n", cfg.Resolver.ImportPath(), cfg.Resolver.Type)
+ return nil
+ }
+
+ if _, err := os.Stat(filename); os.IsNotExist(errors.Cause(err)) {
+ if err := templates.RenderToFile("resolver.gotpl", filename, resolverBuild); err != nil {
+ return err
+ }
+ } else {
+ log.Printf("Skipped resolver: %s already exists\n", filename)
+ }
+
+ return nil
+}
+
+func (cfg *Config) normalize() error {
+ if err := cfg.Model.normalize(); err != nil {
+ return errors.Wrap(err, "model")
+ }
+
+ if err := cfg.Exec.normalize(); err != nil {
+ return errors.Wrap(err, "exec")
+ }
+
+ if cfg.Resolver.IsDefined() {
+ if err := cfg.Resolver.normalize(); err != nil {
+ return errors.Wrap(err, "resolver")
+ }
+ }
+
+ builtins := TypeMap{
+ "__Directive": {Model: "github.com/99designs/gqlgen/graphql/introspection.Directive"},
+ "__Type": {Model: "github.com/99designs/gqlgen/graphql/introspection.Type"},
+ "__Field": {Model: "github.com/99designs/gqlgen/graphql/introspection.Field"},
+ "__EnumValue": {Model: "github.com/99designs/gqlgen/graphql/introspection.EnumValue"},
+ "__InputValue": {Model: "github.com/99designs/gqlgen/graphql/introspection.InputValue"},
+ "__Schema": {Model: "github.com/99designs/gqlgen/graphql/introspection.Schema"},
+ "Int": {Model: "github.com/99designs/gqlgen/graphql.Int"},
+ "Float": {Model: "github.com/99designs/gqlgen/graphql.Float"},
+ "String": {Model: "github.com/99designs/gqlgen/graphql.String"},
+ "Boolean": {Model: "github.com/99designs/gqlgen/graphql.Boolean"},
+ "ID": {Model: "github.com/99designs/gqlgen/graphql.ID"},
+ "Time": {Model: "github.com/99designs/gqlgen/graphql.Time"},
+ "Map": {Model: "github.com/99designs/gqlgen/graphql.Map"},
+ }
+
+ if cfg.Models == nil {
+ cfg.Models = TypeMap{}
+ }
+ for typeName, entry := range builtins {
+ if !cfg.Models.Exists(typeName) {
+ cfg.Models[typeName] = entry
+ }
+ }
+
+ var err *gqlerror.Error
+ cfg.schema, err = gqlparser.LoadSchema(&ast.Source{Name: cfg.SchemaFilename, Input: cfg.SchemaStr})
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+var invalidPackageNameChar = regexp.MustCompile(`[^\w]`)
+
+func sanitizePackageName(pkg string) string {
+ return invalidPackageNameChar.ReplaceAllLiteralString(filepath.Base(pkg), "_")
+}
+
+func abs(path string) string {
+ absPath, err := filepath.Abs(path)
+ if err != nil {
+ panic(err)
+ }
+ return filepath.ToSlash(absPath)
+}
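
Note: Generate above is the single entry point for code generation (models, exec, and the optional resolver stub). A hedged sketch of driving it directly; the schema and output paths are illustrative only, not necessarily what git-bug's graphql/gen_graphql.go passes.

package main

import (
	"io/ioutil"
	"log"

	"github.com/99designs/gqlgen/codegen"
)

func main() {
	// Hypothetical file names; gqlgen 0.5.x still expects to run from
	// inside GOPATH so it can derive import paths from these paths.
	schema, err := ioutil.ReadFile("schema.graphql")
	if err != nil {
		log.Fatal(err)
	}

	cfg := codegen.DefaultConfig()
	cfg.SchemaFilename = "schema.graphql"
	cfg.SchemaStr = string(schema)
	cfg.Exec = codegen.PackageConfig{Filename: "graph/gen_graph.go"}
	cfg.Model = codegen.PackageConfig{Filename: "models/gen_models.go"}

	if err := codegen.Generate(*cfg); err != nil {
		log.Fatal(err)
	}
}
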
diff --git a/vendor/github.com/vektah/gqlgen/codegen/config.go b/vendor/github.com/99designs/gqlgen/codegen/config.go
index cd42ae6b..db0e467b 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/config.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/config.go
@@ -8,26 +8,29 @@ import (
"path/filepath"
"strings"
+ "github.com/99designs/gqlgen/internal/gopath"
"github.com/pkg/errors"
- "github.com/vektah/gqlgen/neelance/schema"
+ "github.com/vektah/gqlparser/ast"
"gopkg.in/yaml.v2"
)
-var defaults = Config{
- SchemaFilename: "schema.graphql",
- Model: PackageConfig{Filename: "models_gen.go"},
- Exec: PackageConfig{Filename: "generated.go"},
-}
-
var cfgFilenames = []string{".gqlgen.yml", "gqlgen.yml", "gqlgen.yaml"}
-// LoadDefaultConfig looks for a config file in the current directory, and all parent directories
+// DefaultConfig creates a copy of the default config
+func DefaultConfig() *Config {
+ return &Config{
+ SchemaFilename: "schema.graphql",
+ Model: PackageConfig{Filename: "models_gen.go"},
+ Exec: PackageConfig{Filename: "generated.go"},
+ }
+}
+
+// LoadConfigFromDefaultLocations looks for a config file in the current directory, and all parent directories
// walking up the tree. The closest config file will be returned.
-func LoadDefaultConfig() (*Config, error) {
+func LoadConfigFromDefaultLocations() (*Config, error) {
cfgFile, err := findCfg()
- if err != nil || cfgFile == "" {
- cpy := defaults
- return &cpy, err
+ if err != nil {
+ return nil, err
}
err = os.Chdir(filepath.Dir(cfgFile))
@@ -39,18 +42,20 @@ func LoadDefaultConfig() (*Config, error) {
// LoadConfig reads the gqlgen.yml config file
func LoadConfig(filename string) (*Config, error) {
- config := defaults
+ config := DefaultConfig()
b, err := ioutil.ReadFile(filename)
if err != nil {
return nil, errors.Wrap(err, "unable to read config")
}
- if err := yaml.UnmarshalStrict(b, &config); err != nil {
+ if err := yaml.UnmarshalStrict(b, config); err != nil {
return nil, errors.Wrap(err, "unable to parse config")
}
- return &config, nil
+ config.FilePath = filename
+
+ return config, nil
}
type Config struct {
@@ -58,14 +63,19 @@ type Config struct {
SchemaStr string `yaml:"-"`
Exec PackageConfig `yaml:"exec"`
Model PackageConfig `yaml:"model"`
+ Resolver PackageConfig `yaml:"resolver,omitempty"`
Models TypeMap `yaml:"models,omitempty"`
+ StructTag string `yaml:"struct_tag,omitempty"`
+
+ FilePath string `yaml:"-"`
- schema *schema.Schema `yaml:"-"`
+ schema *ast.Schema `yaml:"-"`
}
type PackageConfig struct {
Filename string `yaml:"filename,omitempty"`
Package string `yaml:"package,omitempty"`
+ Type string `yaml:"type,omitempty"`
}
type TypeMapEntry struct {
@@ -74,7 +84,8 @@ type TypeMapEntry struct {
}
type TypeMapField struct {
- Resolver bool `yaml:"resolver"`
+ Resolver bool `yaml:"resolver"`
+ FieldName string `yaml:"fieldName"`
}
func (c *PackageConfig) normalize() error {
@@ -98,22 +109,11 @@ func (c *PackageConfig) normalize() error {
}
func (c *PackageConfig) ImportPath() string {
- dir := filepath.ToSlash(c.Dir())
- for _, gopath := range filepath.SplitList(build.Default.GOPATH) {
- gopath = filepath.ToSlash(gopath) + "/src/"
- if len(gopath) > len(dir) {
- continue
- }
- if strings.EqualFold(gopath, dir[0:len(gopath)]) {
- dir = dir[len(gopath):]
- break
- }
- }
- return dir
+ return gopath.MustDir2Import(c.Dir())
}
func (c *PackageConfig) Dir() string {
- return filepath.ToSlash(filepath.Dir(c.Filename))
+ return filepath.Dir(c.Filename)
}
func (c *PackageConfig) Check() error {
@@ -126,6 +126,10 @@ func (c *PackageConfig) Check() error {
return nil
}
+func (c *PackageConfig) IsDefined() bool {
+ return c.Filename != ""
+}
+
func (cfg *Config) Check() error {
if err := cfg.Models.Check(); err != nil {
return errors.Wrap(err, "config.models")
@@ -136,6 +140,9 @@ func (cfg *Config) Check() error {
if err := cfg.Model.Check(); err != nil {
return errors.Wrap(err, "config.model")
}
+ if err := cfg.Resolver.Check(); err != nil {
+ return errors.Wrap(err, "config.resolver")
+ }
return nil
}
@@ -170,6 +177,10 @@ func findCfg() (string, error) {
cfg = findCfgInDir(dir)
}
+ if cfg == "" {
+ return "", os.ErrNotExist
+ }
+
return cfg, nil
}
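
Note: the renamed LoadConfigFromDefaultLocations no longer returns a silent copy of the defaults when no config file is found; findCfg now surfaces os.ErrNotExist and leaves the fallback to the caller. A small sketch of the new calling pattern; the fallback policy shown is an assumption, not taken from gqlgen's own CLI.

package main

import (
	"fmt"
	"os"

	"github.com/99designs/gqlgen/codegen"
)

func loadConfig() (*codegen.Config, error) {
	cfg, err := codegen.LoadConfigFromDefaultLocations()
	if os.IsNotExist(err) {
		// No .gqlgen.yml / gqlgen.yml found while walking up the tree:
		// fall back to the defaults, which the old LoadDefaultConfig
		// used to do silently.
		return codegen.DefaultConfig(), nil
	}
	return cfg, err
}

func main() {
	cfg, err := loadConfig()
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("exec output:", cfg.Exec.Filename)
}
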
diff --git a/vendor/github.com/99designs/gqlgen/codegen/directive.go b/vendor/github.com/99designs/gqlgen/codegen/directive.go
new file mode 100644
index 00000000..8017da06
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/directive.go
@@ -0,0 +1,41 @@
+package codegen
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+type Directive struct {
+ Name string
+ Args []FieldArgument
+}
+
+func (d *Directive) ArgsFunc() string {
+ if len(d.Args) == 0 {
+ return ""
+ }
+
+ return "dir_" + d.Name + "_args"
+}
+
+func (d *Directive) CallArgs() string {
+ args := []string{"ctx", "obj", "n"}
+
+ for _, arg := range d.Args {
+ args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
+ }
+
+ return strings.Join(args, ", ")
+}
+
+func (d *Directive) Declaration() string {
+ res := ucFirst(d.Name) + " func(ctx context.Context, obj interface{}, next graphql.Resolver"
+
+ for _, arg := range d.Args {
+ res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
+ }
+
+ res += ") (res interface{}, err error)"
+ return res
+}
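
Note: Declaration() above is what the generated.gotpl template (included further down, in templates/data.go) drops into the DirectiveRoot struct of the generated code. A hedged sketch of the shape it produces; the @hasRole directive and its argument are invented for illustration.

package example

import (
	"context"

	"github.com/99designs/gqlgen/graphql"
)

// Roughly what Declaration() expands to for a hypothetical directive
//
//	directive @hasRole(role: String!) on FIELD_DEFINITION
//
// ctx, obj and next are always present; each directive argument is appended
// after them with its Go signature.
type DirectiveRoot struct {
	HasRole func(ctx context.Context, obj interface{}, next graphql.Resolver, role string) (res interface{}, err error)
}
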
diff --git a/vendor/github.com/99designs/gqlgen/codegen/directive_build.go b/vendor/github.com/99designs/gqlgen/codegen/directive_build.go
new file mode 100644
index 00000000..32828841
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/directive_build.go
@@ -0,0 +1,49 @@
+package codegen
+
+import (
+ "sort"
+
+ "github.com/pkg/errors"
+)
+
+func (cfg *Config) buildDirectives(types NamedTypes) ([]*Directive, error) {
+ var directives []*Directive
+
+ for name, dir := range cfg.schema.Directives {
+ if name == "skip" || name == "include" || name == "deprecated" {
+ continue
+ }
+
+ var args []FieldArgument
+ for _, arg := range dir.Arguments {
+ newArg := FieldArgument{
+ GQLName: arg.Name,
+ Type: types.getType(arg.Type),
+ GoVarName: sanitizeArgName(arg.Name),
+ }
+
+ if !newArg.Type.IsInput && !newArg.Type.IsScalar {
+ return nil, errors.Errorf("%s cannot be used as argument of directive %s(%s) only input and scalar types are allowed", arg.Type, dir.Name, arg.Name)
+ }
+
+ if arg.DefaultValue != nil {
+ var err error
+ newArg.Default, err = arg.DefaultValue.Value(nil)
+ if err != nil {
+ return nil, errors.Errorf("default value for directive argument %s(%s) is not valid: %s", dir.Name, arg.Name, err.Error())
+ }
+ newArg.StripPtr()
+ }
+ args = append(args, newArg)
+ }
+
+ directives = append(directives, &Directive{
+ Name: name,
+ Args: args,
+ })
+ }
+
+ sort.Slice(directives, func(i, j int) bool { return directives[i].Name < directives[j].Name })
+
+ return directives, nil
+}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/enum.go b/vendor/github.com/99designs/gqlgen/codegen/enum.go
index e62fd2b1..7804971c 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/enum.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/enum.go
@@ -2,8 +2,8 @@ package codegen
type Enum struct {
*NamedType
-
- Values []EnumValue
+ Description string
+ Values []EnumValue
}
type EnumValue struct {
diff --git a/vendor/github.com/99designs/gqlgen/codegen/enum_build.go b/vendor/github.com/99designs/gqlgen/codegen/enum_build.go
new file mode 100644
index 00000000..457d923f
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/enum_build.go
@@ -0,0 +1,39 @@
+package codegen
+
+import (
+ "sort"
+ "strings"
+
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/vektah/gqlparser/ast"
+)
+
+func (cfg *Config) buildEnums(types NamedTypes) []Enum {
+ var enums []Enum
+
+ for _, typ := range cfg.schema.Types {
+ namedType := types[typ.Name]
+ if typ.Kind != ast.Enum || strings.HasPrefix(typ.Name, "__") || namedType.IsUserDefined {
+ continue
+ }
+
+ var values []EnumValue
+ for _, v := range typ.EnumValues {
+ values = append(values, EnumValue{v.Name, v.Description})
+ }
+
+ enum := Enum{
+ NamedType: namedType,
+ Values: values,
+ Description: typ.Description,
+ }
+ enum.GoType = templates.ToCamel(enum.GQLType)
+ enums = append(enums, enum)
+ }
+
+ sort.Slice(enums, func(i, j int) bool {
+ return enums[i].GQLType < enums[j].GQLType
+ })
+
+ return enums
+}
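
Note: buildEnums feeds the models.gotpl template (its full text appears further down, in templates/data.go). For an invented enum such as "enum Status { OPEN CLOSED }" the emitted model should look roughly like the sketch below; the constant names assume templates.ToCamel title-cases the GraphQL values.

package models

import (
	"fmt"
	"io"
	"strconv"
)

// Status is approximately what models.gotpl emits for the invented
// "enum Status { OPEN CLOSED }" definition.
type Status string

const (
	StatusOpen   Status = "OPEN"
	StatusClosed Status = "CLOSED"
)

func (e Status) IsValid() bool {
	switch e {
	case StatusOpen, StatusClosed:
		return true
	}
	return false
}

func (e Status) String() string {
	return string(e)
}

func (e *Status) UnmarshalGQL(v interface{}) error {
	str, ok := v.(string)
	if !ok {
		return fmt.Errorf("enums must be strings")
	}
	*e = Status(str)
	if !e.IsValid() {
		return fmt.Errorf("%s is not a valid Status", str)
	}
	return nil
}

func (e Status) MarshalGQL(w io.Writer) {
	fmt.Fprint(w, strconv.Quote(e.String()))
}
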
diff --git a/vendor/github.com/vektah/gqlgen/codegen/import.go b/vendor/github.com/99designs/gqlgen/codegen/import.go
index b511e8f6..b511e8f6 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/import.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/import.go
diff --git a/vendor/github.com/vektah/gqlgen/codegen/import_build.go b/vendor/github.com/99designs/gqlgen/codegen/import_build.go
index f0877ed3..d634834e 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/import_build.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/import_build.go
@@ -5,7 +5,14 @@ import (
"go/build"
"sort"
"strconv"
- "strings"
+
+ // Import and ignore the ambient imports listed below so dependency managers
+ // don't prune unused code for us. Both lists should be kept in sync.
+ _ "github.com/99designs/gqlgen/graphql"
+ _ "github.com/99designs/gqlgen/graphql/introspection"
+ "github.com/99designs/gqlgen/internal/gopath"
+ _ "github.com/vektah/gqlparser"
+ _ "github.com/vektah/gqlparser/ast"
)
// These imports are referenced by the generated code, and are assumed to have the
@@ -18,12 +25,12 @@ var ambientImports = []string{
"strconv",
"time",
"sync",
- "github.com/vektah/gqlgen/neelance/introspection",
- "github.com/vektah/gqlgen/neelance/errors",
- "github.com/vektah/gqlgen/neelance/query",
- "github.com/vektah/gqlgen/neelance/schema",
- "github.com/vektah/gqlgen/neelance/validation",
- "github.com/vektah/gqlgen/graphql",
+ "errors",
+
+ "github.com/vektah/gqlparser",
+ "github.com/vektah/gqlparser/ast",
+ "github.com/99designs/gqlgen/graphql",
+ "github.com/99designs/gqlgen/graphql/introspection",
}
func buildImports(types NamedTypes, destDir string) *Imports {
@@ -48,7 +55,8 @@ func (s *Imports) add(path string) *Import {
return nil
}
- if stringHasSuffixFold(s.destDir, path) {
+ // if we are referencing our own package we don't need an import
+ if gopath.MustDir2Import(s.destDir) == path {
return nil
}
@@ -70,10 +78,6 @@ func (s *Imports) add(path string) *Import {
return imp
}
-func stringHasSuffixFold(s, suffix string) bool {
- return len(s) >= len(suffix) && strings.EqualFold(s[len(s)-len(suffix):], suffix)
-}
-
func (s Imports) finalize() []*Import {
// ensure stable ordering by sorting
sort.Slice(s.imports, func(i, j int) bool {
diff --git a/vendor/github.com/vektah/gqlgen/codegen/input_build.go b/vendor/github.com/99designs/gqlgen/codegen/input_build.go
index 98b25b8b..06ff37a0 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/input_build.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/input_build.go
@@ -3,10 +3,9 @@ package codegen
import (
"go/types"
"sort"
- "strings"
"github.com/pkg/errors"
- "github.com/vektah/gqlgen/neelance/schema"
+ "github.com/vektah/gqlparser/ast"
"golang.org/x/tools/go/loader"
)
@@ -14,9 +13,9 @@ func (cfg *Config) buildInputs(namedTypes NamedTypes, prog *loader.Program, impo
var inputs Objects
for _, typ := range cfg.schema.Types {
- switch typ := typ.(type) {
- case *schema.InputObject:
- input, err := buildInput(namedTypes, typ)
+ switch typ.Kind {
+ case ast.InputObject:
+ input, err := cfg.buildInput(namedTypes, typ)
if err != nil {
return nil, err
}
@@ -27,7 +26,7 @@ func (cfg *Config) buildInputs(namedTypes NamedTypes, prog *loader.Program, impo
}
if def != nil {
input.Marshaler = buildInputMarshaler(typ, def)
- bindErrs := bindObject(def.Type(), input, imports)
+ bindErrs := bindObject(def.Type(), input, imports, cfg.StructTag)
if len(bindErrs) > 0 {
return nil, bindErrs
}
@@ -38,24 +37,35 @@ func (cfg *Config) buildInputs(namedTypes NamedTypes, prog *loader.Program, impo
}
sort.Slice(inputs, func(i, j int) bool {
- return strings.Compare(inputs[i].GQLType, inputs[j].GQLType) == -1
+ return inputs[i].GQLType < inputs[j].GQLType
})
return inputs, nil
}
-func buildInput(types NamedTypes, typ *schema.InputObject) (*Object, error) {
- obj := &Object{NamedType: types[typ.TypeName()]}
+func (cfg *Config) buildInput(types NamedTypes, typ *ast.Definition) (*Object, error) {
+ obj := &Object{NamedType: types[typ.Name]}
+ typeEntry, entryExists := cfg.Models[typ.Name]
- for _, field := range typ.Values {
+ for _, field := range typ.Fields {
newField := Field{
- GQLName: field.Name.Name,
+ GQLName: field.Name,
Type: types.getType(field.Type),
Object: obj,
}
- if field.Default != nil {
- newField.Default = field.Default.Value(nil)
+ if entryExists {
+ if typeField, ok := typeEntry.Fields[field.Name]; ok {
+ newField.GoFieldName = typeField.FieldName
+ }
+ }
+
+ if field.DefaultValue != nil {
+ var err error
+ newField.Default, err = field.DefaultValue.Value(nil)
+ if err != nil {
+ return nil, errors.Errorf("default value for %s.%s is not valid: %s", typ.Name, field.Name, err.Error())
+ }
}
if !newField.Type.IsInput && !newField.Type.IsScalar {
@@ -70,7 +80,7 @@ func buildInput(types NamedTypes, typ *schema.InputObject) (*Object, error) {
// if user has implemented an UnmarshalGQL method on the input type manually, use it
// otherwise we will generate one.
-func buildInputMarshaler(typ *schema.InputObject, def types.Object) *Ref {
+func buildInputMarshaler(typ *ast.Definition, def types.Object) *Ref {
switch def := def.(type) {
case *types.TypeName:
namedType := def.Type().(*types.Named)
diff --git a/vendor/github.com/vektah/gqlgen/codegen/interface.go b/vendor/github.com/99designs/gqlgen/codegen/interface.go
index 2de0c88a..2de0c88a 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/interface.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/interface.go
diff --git a/vendor/github.com/vektah/gqlgen/codegen/interface_build.go b/vendor/github.com/99designs/gqlgen/codegen/interface_build.go
index cdf0f597..9f4a4ff4 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/interface_build.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/interface_build.go
@@ -5,63 +5,39 @@ import (
"go/types"
"os"
"sort"
- "strings"
- "github.com/vektah/gqlgen/neelance/schema"
+ "github.com/vektah/gqlparser/ast"
"golang.org/x/tools/go/loader"
)
func (cfg *Config) buildInterfaces(types NamedTypes, prog *loader.Program) []*Interface {
var interfaces []*Interface
for _, typ := range cfg.schema.Types {
- switch typ := typ.(type) {
- case *schema.Union, *schema.Interface:
+ if typ.Kind == ast.Union || typ.Kind == ast.Interface {
interfaces = append(interfaces, cfg.buildInterface(types, typ, prog))
- default:
- continue
}
}
sort.Slice(interfaces, func(i, j int) bool {
- return strings.Compare(interfaces[i].GQLType, interfaces[j].GQLType) == -1
+ return interfaces[i].GQLType < interfaces[j].GQLType
})
return interfaces
}
-func (cfg *Config) buildInterface(types NamedTypes, typ schema.NamedType, prog *loader.Program) *Interface {
- switch typ := typ.(type) {
+func (cfg *Config) buildInterface(types NamedTypes, typ *ast.Definition, prog *loader.Program) *Interface {
+ i := &Interface{NamedType: types[typ.Name]}
- case *schema.Union:
- i := &Interface{NamedType: types[typ.TypeName()]}
+ for _, implementor := range cfg.schema.GetPossibleTypes(typ) {
+ t := types[implementor.Name]
- for _, implementor := range typ.PossibleTypes {
- t := types[implementor.TypeName()]
-
- i.Implementors = append(i.Implementors, InterfaceImplementor{
- NamedType: t,
- ValueReceiver: cfg.isValueReceiver(types[typ.Name], t, prog),
- })
- }
-
- return i
-
- case *schema.Interface:
- i := &Interface{NamedType: types[typ.TypeName()]}
-
- for _, implementor := range typ.PossibleTypes {
- t := types[implementor.TypeName()]
-
- i.Implementors = append(i.Implementors, InterfaceImplementor{
- NamedType: t,
- ValueReceiver: cfg.isValueReceiver(types[typ.Name], t, prog),
- })
- }
-
- return i
- default:
- panic(fmt.Errorf("unknown interface %#v", typ))
+ i.Implementors = append(i.Implementors, InterfaceImplementor{
+ NamedType: t,
+ ValueReceiver: cfg.isValueReceiver(types[typ.Name], t, prog),
+ })
}
+
+ return i
}
func (cfg *Config) isValueReceiver(intf *NamedType, implementor *NamedType, prog *loader.Program) bool {
diff --git a/vendor/github.com/99designs/gqlgen/codegen/model.go b/vendor/github.com/99designs/gqlgen/codegen/model.go
new file mode 100644
index 00000000..5ba50337
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/model.go
@@ -0,0 +1,16 @@
+package codegen
+
+type Model struct {
+ *NamedType
+ Description string
+ Fields []ModelField
+}
+
+type ModelField struct {
+ *Type
+ GQLName string
+ GoFieldName string
+ GoFKName string
+ GoFKType string
+ Description string
+}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/models_build.go b/vendor/github.com/99designs/gqlgen/codegen/models_build.go
index 211d4bd4..9f98a07d 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/models_build.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/models_build.go
@@ -2,20 +2,19 @@ package codegen
import (
"sort"
- "strings"
- "github.com/vektah/gqlgen/neelance/schema"
+ "github.com/vektah/gqlparser/ast"
"golang.org/x/tools/go/loader"
)
-func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model, error) {
+func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program, imports *Imports) ([]Model, error) {
var models []Model
for _, typ := range cfg.schema.Types {
var model Model
- switch typ := typ.(type) {
- case *schema.Object:
- obj, err := cfg.buildObject(types, typ)
+ switch typ.Kind {
+ case ast.Object:
+ obj, err := cfg.buildObject(types, typ, imports)
if err != nil {
return nil, err
}
@@ -23,8 +22,8 @@ func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model,
continue
}
model = cfg.obj2Model(obj)
- case *schema.InputObject:
- obj, err := buildInput(types, typ)
+ case ast.InputObject:
+ obj, err := cfg.buildInput(types, typ)
if err != nil {
return nil, err
}
@@ -32,7 +31,7 @@ func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model,
continue
}
model = cfg.obj2Model(obj)
- case *schema.Interface, *schema.Union:
+ case ast.Interface, ast.Union:
intf := cfg.buildInterface(types, typ, prog)
if intf.IsUserDefined {
continue
@@ -41,12 +40,13 @@ func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model,
default:
continue
}
+ model.Description = typ.Description // It's this or change both obj2Model and buildObject
models = append(models, model)
}
sort.Slice(models, func(i, j int) bool {
- return strings.Compare(models[i].GQLType, models[j].GQLType) == -1
+ return models[i].GQLType < models[j].GQLType
})
return models, nil
@@ -65,11 +65,10 @@ func (cfg *Config) obj2Model(obj *Object) Model {
field := &obj.Fields[i]
mf := ModelField{Type: field.Type, GQLName: field.GQLName}
- mf.GoVarName = ucFirst(field.GQLName)
- if mf.IsScalar {
- if mf.GoVarName == "Id" {
- mf.GoVarName = "ID"
- }
+ if field.GoFieldName != "" {
+ mf.GoFieldName = field.GoFieldName
+ } else {
+ mf.GoFieldName = field.GoNameExported()
}
model.Fields = append(model.Fields, mf)
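
Note: obj2Model now honours a per-field GoFieldName, which comes from the new FieldName entry of TypeMapField in the models section of gqlgen.yml. A hedged sketch of setting that mapping programmatically; the type, model path and field override are invented, and the exact shape of TypeMapEntry.Fields is inferred from its use in buildInput and buildObject.

package main

import (
	"fmt"

	"github.com/99designs/gqlgen/codegen"
)

func main() {
	cfg := codegen.DefaultConfig()

	// Invented mapping: bind the GraphQL type "Bug" to an existing Go struct
	// and rename one field, which the new GoFieldName plumbing prefers over
	// the name derived from the GraphQL field.
	cfg.Models = codegen.TypeMap{
		"Bug": codegen.TypeMapEntry{
			Model: "example.com/demo/bug.Bug",
			Fields: map[string]codegen.TypeMapField{
				"title": {FieldName: "Title"},
			},
		},
	}

	fmt.Println(cfg.Models["Bug"].Fields["title"].FieldName)
}
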
diff --git a/vendor/github.com/99designs/gqlgen/codegen/object.go b/vendor/github.com/99designs/gqlgen/codegen/object.go
new file mode 100644
index 00000000..d9f610f4
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/object.go
@@ -0,0 +1,464 @@
+package codegen
+
+import (
+ "bytes"
+ "fmt"
+ "strconv"
+ "strings"
+ "text/template"
+ "unicode"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+type GoFieldType int
+
+const (
+ GoFieldUndefined GoFieldType = iota
+ GoFieldMethod
+ GoFieldVariable
+)
+
+type Object struct {
+ *NamedType
+
+ Fields []Field
+ Satisfies []string
+ ResolverInterface *Ref
+ Root bool
+ DisableConcurrency bool
+ Stream bool
+}
+
+type Field struct {
+ *Type
+ Description string // Description of a field
+ GQLName string // The name of the field in graphql
+ GoFieldType GoFieldType // The field type in go, if any
+ GoReceiverName string // The name of method & var receiver in go, if any
+ GoFieldName string // The name of the method or var in go, if any
+ Args []FieldArgument // A list of arguments to be passed to this field
+ ForceResolver bool // Should we emit a Resolver method
+ NoErr bool // If this is bound to a go method, does that method have an error as the second argument
+ Object *Object // A link back to the parent object
+ Default interface{} // The default value
+}
+
+type FieldArgument struct {
+ *Type
+
+ GQLName string // The name of the argument in graphql
+ GoVarName string // The name of the var in go
+ Object *Object // A link back to the parent object
+ Default interface{} // The default value
+}
+
+type Objects []*Object
+
+func (o *Object) Implementors() string {
+ satisfiedBy := strconv.Quote(o.GQLType)
+ for _, s := range o.Satisfies {
+ satisfiedBy += ", " + strconv.Quote(s)
+ }
+ return "[]string{" + satisfiedBy + "}"
+}
+
+func (o *Object) HasResolvers() bool {
+ for _, f := range o.Fields {
+ if f.IsResolver() {
+ return true
+ }
+ }
+ return false
+}
+
+func (o *Object) IsConcurrent() bool {
+ for _, f := range o.Fields {
+ if f.IsConcurrent() {
+ return true
+ }
+ }
+ return false
+}
+
+func (o *Object) IsReserved() bool {
+ return strings.HasPrefix(o.GQLType, "__")
+}
+
+func (f *Field) IsResolver() bool {
+ return f.GoFieldName == ""
+}
+
+func (f *Field) IsReserved() bool {
+ return strings.HasPrefix(f.GQLName, "__")
+}
+
+func (f *Field) IsMethod() bool {
+ return f.GoFieldType == GoFieldMethod
+}
+
+func (f *Field) IsVariable() bool {
+ return f.GoFieldType == GoFieldVariable
+}
+
+func (f *Field) IsConcurrent() bool {
+ return f.IsResolver() && !f.Object.DisableConcurrency
+}
+
+func (f *Field) GoNameExported() string {
+ return lintName(ucFirst(f.GQLName))
+}
+
+func (f *Field) GoNameUnexported() string {
+ return lintName(f.GQLName)
+}
+
+func (f *Field) ShortInvocation() string {
+ if !f.IsResolver() {
+ return ""
+ }
+
+ return fmt.Sprintf("%s().%s(%s)", f.Object.GQLType, f.GoNameExported(), f.CallArgs())
+}
+
+func (f *Field) ArgsFunc() string {
+ if len(f.Args) == 0 {
+ return ""
+ }
+
+ return "field_" + f.Object.GQLType + "_" + f.GQLName + "_args"
+}
+
+func (f *Field) ResolverType() string {
+ if !f.IsResolver() {
+ return ""
+ }
+
+ return fmt.Sprintf("%s().%s(%s)", f.Object.GQLType, f.GoNameExported(), f.CallArgs())
+}
+
+func (f *Field) ShortResolverDeclaration() string {
+ if !f.IsResolver() {
+ return ""
+ }
+ res := fmt.Sprintf("%s(ctx context.Context", f.GoNameExported())
+
+ if !f.Object.Root {
+ res += fmt.Sprintf(", obj *%s", f.Object.FullName())
+ }
+ for _, arg := range f.Args {
+ res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
+ }
+
+ result := f.Signature()
+ if f.Object.Stream {
+ result = "<-chan " + result
+ }
+
+ res += fmt.Sprintf(") (%s, error)", result)
+ return res
+}
+
+func (f *Field) ResolverDeclaration() string {
+ if !f.IsResolver() {
+ return ""
+ }
+ res := fmt.Sprintf("%s_%s(ctx context.Context", f.Object.GQLType, f.GoNameUnexported())
+
+ if !f.Object.Root {
+ res += fmt.Sprintf(", obj *%s", f.Object.FullName())
+ }
+ for _, arg := range f.Args {
+ res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
+ }
+
+ result := f.Signature()
+ if f.Object.Stream {
+ result = "<-chan " + result
+ }
+
+ res += fmt.Sprintf(") (%s, error)", result)
+ return res
+}
+
+func (f *Field) ComplexitySignature() string {
+ res := fmt.Sprintf("func(childComplexity int")
+ for _, arg := range f.Args {
+ res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
+ }
+ res += ") int"
+ return res
+}
+
+func (f *Field) ComplexityArgs() string {
+ var args []string
+ for _, arg := range f.Args {
+ args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
+ }
+
+ return strings.Join(args, ", ")
+}
+
+func (f *Field) CallArgs() string {
+ var args []string
+
+ if f.IsResolver() {
+ args = append(args, "ctx")
+
+ if !f.Object.Root {
+ args = append(args, "obj")
+ }
+ }
+
+ for _, arg := range f.Args {
+ args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
+ }
+
+ return strings.Join(args, ", ")
+}
+
+// should be in the template, but it's recursive and has a bunch of args
+func (f *Field) WriteJson() string {
+ return f.doWriteJson("res", f.Type.Modifiers, f.ASTType, false, 1)
+}
+
+func (f *Field) doWriteJson(val string, remainingMods []string, astType *ast.Type, isPtr bool, depth int) string {
+ switch {
+ case len(remainingMods) > 0 && remainingMods[0] == modPtr:
+ return tpl(`
+ if {{.val}} == nil {
+ {{- if .nonNull }}
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ {{- end }}
+ return graphql.Null
+ }
+ {{.next }}`, map[string]interface{}{
+ "val": val,
+ "nonNull": astType.NonNull,
+ "next": f.doWriteJson(val, remainingMods[1:], astType, true, depth+1),
+ })
+
+ case len(remainingMods) > 0 && remainingMods[0] == modList:
+ if isPtr {
+ val = "*" + val
+ }
+ var arr = "arr" + strconv.Itoa(depth)
+ var index = "idx" + strconv.Itoa(depth)
+ var usePtr bool
+ if len(remainingMods) == 1 && !isPtr {
+ usePtr = true
+ }
+
+ return tpl(`
+ {{.arr}} := make(graphql.Array, len({{.val}}))
+ {{ if and .top (not .isScalar) }} var wg sync.WaitGroup {{ end }}
+ {{ if not .isScalar }}
+ isLen1 := len({{.val}}) == 1
+ if !isLen1 {
+ wg.Add(len({{.val}}))
+ }
+ {{ end }}
+ for {{.index}} := range {{.val}} {
+ {{- if not .isScalar }}
+ {{.index}} := {{.index}}
+ rctx := &graphql.ResolverContext{
+ Index: &{{.index}},
+ Result: {{ if .usePtr }}&{{end}}{{.val}}[{{.index}}],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func({{.index}} int) {
+ if !isLen1 {
+ defer wg.Done()
+ }
+ {{.arr}}[{{.index}}] = func() graphql.Marshaler {
+ {{ .next }}
+ }()
+ }
+ if isLen1 {
+ f({{.index}})
+ } else {
+ go f({{.index}})
+ }
+ {{ else }}
+ {{.arr}}[{{.index}}] = func() graphql.Marshaler {
+ {{ .next }}
+ }()
+ {{- end}}
+ }
+ {{ if and .top (not .isScalar) }} wg.Wait() {{ end }}
+ return {{.arr}}`, map[string]interface{}{
+ "val": val,
+ "arr": arr,
+ "index": index,
+ "top": depth == 1,
+ "arrayLen": len(val),
+ "isScalar": f.IsScalar,
+ "usePtr": usePtr,
+ "next": f.doWriteJson(val+"["+index+"]", remainingMods[1:], astType.Elem, false, depth+1),
+ })
+
+ case f.IsScalar:
+ if isPtr {
+ val = "*" + val
+ }
+ return f.Marshal(val)
+
+ default:
+ if !isPtr {
+ val = "&" + val
+ }
+ return tpl(`
+ return ec._{{.type}}(ctx, field.Selections, {{.val}})`, map[string]interface{}{
+ "type": f.GQLType,
+ "val": val,
+ })
+ }
+}
+
+func (f *FieldArgument) Stream() bool {
+ return f.Object != nil && f.Object.Stream
+}
+
+func (os Objects) ByName(name string) *Object {
+ for i, o := range os {
+ if strings.EqualFold(o.GQLType, name) {
+ return os[i]
+ }
+ }
+ return nil
+}
+
+func tpl(tpl string, vars map[string]interface{}) string {
+ b := &bytes.Buffer{}
+ err := template.Must(template.New("inline").Parse(tpl)).Execute(b, vars)
+ if err != nil {
+ panic(err)
+ }
+ return b.String()
+}
+
+func ucFirst(s string) string {
+ if s == "" {
+ return ""
+ }
+
+ r := []rune(s)
+ r[0] = unicode.ToUpper(r[0])
+ return string(r)
+}
+
+// copy from https://github.com/golang/lint/blob/06c8688daad7faa9da5a0c2f163a3d14aac986ca/lint.go#L679
+
+// lintName returns a different name if it should be different.
+func lintName(name string) (should string) {
+ // Fast path for simple cases: "_" and all lowercase.
+ if name == "_" {
+ return name
+ }
+ allLower := true
+ for _, r := range name {
+ if !unicode.IsLower(r) {
+ allLower = false
+ break
+ }
+ }
+ if allLower {
+ return name
+ }
+
+ // Split camelCase at any lower->upper transition, and split on underscores.
+ // Check each word for common initialisms.
+ runes := []rune(name)
+ w, i := 0, 0 // index of start of word, scan
+ for i+1 <= len(runes) {
+ eow := false // whether we hit the end of a word
+ if i+1 == len(runes) {
+ eow = true
+ } else if runes[i+1] == '_' {
+ // underscore; shift the remainder forward over any run of underscores
+ eow = true
+ n := 1
+ for i+n+1 < len(runes) && runes[i+n+1] == '_' {
+ n++
+ }
+
+ // Leave at most one underscore if the underscore is between two digits
+ if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) {
+ n--
+ }
+
+ copy(runes[i+1:], runes[i+n+1:])
+ runes = runes[:len(runes)-n]
+ } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) {
+ // lower->non-lower
+ eow = true
+ }
+ i++
+ if !eow {
+ continue
+ }
+
+ // [w,i) is a word.
+ word := string(runes[w:i])
+ if u := strings.ToUpper(word); commonInitialisms[u] {
+ // Keep consistent case, which is lowercase only at the start.
+ if w == 0 && unicode.IsLower(runes[w]) {
+ u = strings.ToLower(u)
+ }
+ // All the common initialisms are ASCII,
+ // so we can replace the bytes exactly.
+ copy(runes[w:], []rune(u))
+ } else if w > 0 && strings.ToLower(word) == word {
+ // already all lowercase, and not the first word, so uppercase the first character.
+ runes[w] = unicode.ToUpper(runes[w])
+ }
+ w = i
+ }
+ return string(runes)
+}
+
+// commonInitialisms is a set of common initialisms.
+// Only add entries that are highly unlikely to be non-initialisms.
+// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
+var commonInitialisms = map[string]bool{
+ "ACL": true,
+ "API": true,
+ "ASCII": true,
+ "CPU": true,
+ "CSS": true,
+ "DNS": true,
+ "EOF": true,
+ "GUID": true,
+ "HTML": true,
+ "HTTP": true,
+ "HTTPS": true,
+ "ID": true,
+ "IP": true,
+ "JSON": true,
+ "LHS": true,
+ "QPS": true,
+ "RAM": true,
+ "RHS": true,
+ "RPC": true,
+ "SLA": true,
+ "SMTP": true,
+ "SQL": true,
+ "SSH": true,
+ "TCP": true,
+ "TLS": true,
+ "TTL": true,
+ "UDP": true,
+ "UI": true,
+ "UID": true,
+ "UUID": true,
+ "URI": true,
+ "URL": true,
+ "UTF8": true,
+ "VM": true,
+ "XML": true,
+ "XMPP": true,
+ "XSRF": true,
+ "XSS": true,
+}
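
Note: GoNameExported runs GraphQL field names through ucFirst and golint's lintName so common initialisms keep their canonical casing. A hedged sketch of the expected behaviour, written as a test that would have to live inside the codegen package since lintName is unexported; the expected values are worked out from the code above, not captured from a real run.

package codegen

import "testing"

func TestLintNameInitialisms(t *testing.T) {
	// Expected values assume the golint rules reproduced above.
	cases := map[string]string{
		"Id":       "ID",
		"HumanId":  "HumanID",
		"Url":      "URL",
		"LastEdit": "LastEdit",
	}
	for in, want := range cases {
		if got := lintName(in); got != want {
			t.Errorf("lintName(%q) = %q, want %q", in, got, want)
		}
	}
}
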
diff --git a/vendor/github.com/99designs/gqlgen/codegen/object_build.go b/vendor/github.com/99designs/gqlgen/codegen/object_build.go
new file mode 100644
index 00000000..ee2b2f1c
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/object_build.go
@@ -0,0 +1,181 @@
+package codegen
+
+import (
+ "log"
+ "sort"
+
+ "github.com/pkg/errors"
+ "github.com/vektah/gqlparser/ast"
+ "golang.org/x/tools/go/loader"
+)
+
+func (cfg *Config) buildObjects(types NamedTypes, prog *loader.Program, imports *Imports) (Objects, error) {
+ var objects Objects
+
+ for _, typ := range cfg.schema.Types {
+ if typ.Kind != ast.Object {
+ continue
+ }
+
+ obj, err := cfg.buildObject(types, typ, imports)
+ if err != nil {
+ return nil, err
+ }
+
+ def, err := findGoType(prog, obj.Package, obj.GoType)
+ if err != nil {
+ return nil, err
+ }
+ if def != nil {
+ for _, bindErr := range bindObject(def.Type(), obj, imports, cfg.StructTag) {
+ log.Println(bindErr.Error())
+ log.Println(" Adding resolver method")
+ }
+ }
+
+ objects = append(objects, obj)
+ }
+
+ sort.Slice(objects, func(i, j int) bool {
+ return objects[i].GQLType < objects[j].GQLType
+ })
+
+ return objects, nil
+}
+
+var keywords = []string{
+ "break",
+ "default",
+ "func",
+ "interface",
+ "select",
+ "case",
+ "defer",
+ "go",
+ "map",
+ "struct",
+ "chan",
+ "else",
+ "goto",
+ "package",
+ "switch",
+ "const",
+ "fallthrough",
+ "if",
+ "range",
+ "type",
+ "continue",
+ "for",
+ "import",
+ "return",
+ "var",
+}
+
+// sanitizeArgName prevents collisions with go keywords for arguments to resolver functions
+func sanitizeArgName(name string) string {
+ for _, k := range keywords {
+ if name == k {
+ return name + "Arg"
+ }
+ }
+ return name
+}
+
+func (cfg *Config) buildObject(types NamedTypes, typ *ast.Definition, imports *Imports) (*Object, error) {
+ obj := &Object{NamedType: types[typ.Name]}
+ typeEntry, entryExists := cfg.Models[typ.Name]
+
+ imp := imports.findByPath(cfg.Exec.ImportPath())
+ obj.ResolverInterface = &Ref{GoType: obj.GQLType + "Resolver", Import: imp}
+
+ if typ == cfg.schema.Query {
+ obj.Root = true
+ }
+
+ if typ == cfg.schema.Mutation {
+ obj.Root = true
+ obj.DisableConcurrency = true
+ }
+
+ if typ == cfg.schema.Subscription {
+ obj.Root = true
+ obj.Stream = true
+ }
+
+ obj.Satisfies = append(obj.Satisfies, typ.Interfaces...)
+
+ for _, field := range typ.Fields {
+ if typ == cfg.schema.Query && field.Name == "__type" {
+ obj.Fields = append(obj.Fields, Field{
+ Type: &Type{types["__Schema"], []string{modPtr}, ast.NamedType("__Schema", nil), nil},
+ GQLName: "__schema",
+ NoErr: true,
+ GoFieldType: GoFieldMethod,
+ GoReceiverName: "ec",
+ GoFieldName: "introspectSchema",
+ Object: obj,
+ Description: field.Description,
+ })
+ continue
+ }
+ if typ == cfg.schema.Query && field.Name == "__schema" {
+ obj.Fields = append(obj.Fields, Field{
+ Type: &Type{types["__Type"], []string{modPtr}, ast.NamedType("__Schema", nil), nil},
+ GQLName: "__type",
+ NoErr: true,
+ GoFieldType: GoFieldMethod,
+ GoReceiverName: "ec",
+ GoFieldName: "introspectType",
+ Args: []FieldArgument{
+ {GQLName: "name", Type: &Type{types["String"], []string{}, ast.NamedType("String", nil), nil}, Object: &Object{}},
+ },
+ Object: obj,
+ })
+ continue
+ }
+
+ var forceResolver bool
+ var goName string
+ if entryExists {
+ if typeField, ok := typeEntry.Fields[field.Name]; ok {
+ goName = typeField.FieldName
+ forceResolver = typeField.Resolver
+ }
+ }
+
+ var args []FieldArgument
+ for _, arg := range field.Arguments {
+ newArg := FieldArgument{
+ GQLName: arg.Name,
+ Type: types.getType(arg.Type),
+ Object: obj,
+ GoVarName: sanitizeArgName(arg.Name),
+ }
+
+ if !newArg.Type.IsInput && !newArg.Type.IsScalar {
+ return nil, errors.Errorf("%s cannot be used as argument of %s.%s. only input and scalar types are allowed", arg.Type, obj.GQLType, field.Name)
+ }
+
+ if arg.DefaultValue != nil {
+ var err error
+ newArg.Default, err = arg.DefaultValue.Value(nil)
+ if err != nil {
+ return nil, errors.Errorf("default value for %s.%s is not valid: %s", typ.Name, field.Name, err.Error())
+ }
+ newArg.StripPtr()
+ }
+ args = append(args, newArg)
+ }
+
+ obj.Fields = append(obj.Fields, Field{
+ GQLName: field.Name,
+ Type: types.getType(field.Type),
+ Args: args,
+ Object: obj,
+ GoFieldName: goName,
+ ForceResolver: forceResolver,
+ })
+ }
+
+ return obj, nil
+}
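
Note: sanitizeArgName only renames arguments whose name is exactly a Go keyword, so a GraphQL argument called "type" becomes typeArg in the resolver signature while anything else passes through. A small sketch, again as a test that would sit inside the codegen package because sanitizeArgName is unexported.

package codegen

import "testing"

func TestSanitizeArgName(t *testing.T) {
	// "type" is a Go keyword, so it gets the Arg suffix; "typ" is not.
	if got := sanitizeArgName("type"); got != "typeArg" {
		t.Errorf(`sanitizeArgName("type") = %q, want "typeArg"`, got)
	}
	if got := sanitizeArgName("typ"); got != "typ" {
		t.Errorf(`sanitizeArgName("typ") = %q, want "typ"`, got)
	}
}
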
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/args.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/args.gotpl
new file mode 100644
index 00000000..870a99ed
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/args.gotpl
@@ -0,0 +1,13 @@
+ args := map[string]interface{}{}
+ {{- range $i, $arg := . }}
+ var arg{{$i}} {{$arg.Signature }}
+ if tmp, ok := rawArgs[{{$arg.GQLName|quote}}]; ok {
+ var err error
+ {{$arg.Unmarshal (print "arg" $i) "tmp" }}
+ if err != nil {
+ return nil, err
+ }
+ }
+ args[{{$arg.GQLName|quote}}] = arg{{$i}}
+ {{- end }}
+ return args, nil
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/data.go b/vendor/github.com/99designs/gqlgen/codegen/templates/data.go
new file mode 100644
index 00000000..d168fa31
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/data.go
@@ -0,0 +1,13 @@
+package templates
+
+var data = map[string]string{
+ "args.gotpl": "\targs := map[string]interface{}{}\n\t{{- range $i, $arg := . }}\n\t\tvar arg{{$i}} {{$arg.Signature }}\n\t\tif tmp, ok := rawArgs[{{$arg.GQLName|quote}}]; ok {\n\t\t\tvar err error\n\t\t\t{{$arg.Unmarshal (print \"arg\" $i) \"tmp\" }}\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\targs[{{$arg.GQLName|quote}}] = arg{{$i}}\n\t{{- end }}\n\treturn args, nil\n",
+ "field.gotpl": "{{ $field := . }}\n{{ $object := $field.Object }}\n\n{{- if $object.Stream }}\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {\n\t\t{{- if $field.Args }}\n\t\t\trawArgs := field.ArgumentMap(ec.Variables)\n\t\t\targs, err := {{ $field.ArgsFunc }}(rawArgs)\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\treturn nil\n\t\t\t}\n\t\t{{- end }}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\tField: field,\n\t\t})\n\t\tresults, err := ec.resolvers.{{ $field.ShortInvocation }}\n\t\tif err != nil {\n\t\t\tec.Error(ctx, err)\n\t\t\treturn nil\n\t\t}\n\t\treturn func() graphql.Marshaler {\n\t\t\tres, ok := <-results\n\t\t\tif !ok {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tvar out graphql.OrderedMap\n\t\t\tout.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())\n\t\t\treturn &out\n\t\t}\n\t}\n{{ else }}\n\t// nolint: vetshadow\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {\n\t\t{{- if $field.Args }}\n\t\t\trawArgs := field.ArgumentMap(ec.Variables)\n\t\t\targs, err := {{ $field.ArgsFunc }}(rawArgs)\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\treturn graphql.Null\n\t\t\t}\n\t\t{{- end }}\n\t\trctx := &graphql.ResolverContext{\n\t\t\tObject: {{$object.GQLType|quote}},\n\t\t\tArgs: {{if $field.Args }}args{{else}}nil{{end}},\n\t\t\tField: field,\n\t\t}\n\t\tctx = graphql.WithResolverContext(ctx, rctx)\n\t\tresTmp := ec.FieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, func(ctx context.Context) (interface{}, error) {\n\t\t\t{{- if $field.IsResolver }}\n\t\t\t\treturn ec.resolvers.{{ $field.ShortInvocation }}\n\t\t\t{{- else if $field.IsMethod }}\n\t\t\t\t{{- if $field.NoErr }}\n\t\t\t\t\treturn {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }}), nil\n\t\t\t\t{{- else }}\n\t\t\t\t\treturn {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }})\n\t\t\t\t{{- end }}\n\t\t\t{{- else if $field.IsVariable }}\n\t\t\t\treturn {{$field.GoReceiverName}}.{{$field.GoFieldName}}, nil\n\t\t\t{{- end }}\n\t\t})\n\t\tif resTmp == nil {\n\t\t\t{{- if $field.ASTType.NonNull }}\n\t\t\t\tif !ec.HasError(rctx) {\n\t\t\t\t\tec.Errorf(ctx, \"must not be null\")\n\t\t\t\t}\n\t\t\t{{- end }}\n\t\t\treturn graphql.Null\n\t\t}\n\t\tres := resTmp.({{$field.Signature}})\n\t\trctx.Result = res\n\t\t{{ $field.WriteJson }}\n\t}\n{{ end }}\n",
+ "generated.gotpl": "// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\n// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.\nfunc NewExecutableSchema(cfg Config) graphql.ExecutableSchema {\n\treturn &executableSchema{\n\t\tresolvers: cfg.Resolvers,\n\t\tdirectives: cfg.Directives,\n\t\tcomplexity: cfg.Complexity,\n\t}\n}\n\ntype Config struct {\n\tResolvers ResolverRoot\n\tDirectives DirectiveRoot\n\tComplexity ComplexityRoot\n}\n\ntype ResolverRoot interface {\n{{- range $object := .Objects -}}\n\t{{ if $object.HasResolvers -}}\n\t\t{{$object.GQLType}}() {{$object.GQLType}}Resolver\n\t{{ end }}\n{{- end }}\n}\n\ntype DirectiveRoot struct {\n{{ range $directive := .Directives }}\n\t{{ $directive.Declaration }}\n{{ end }}\n}\n\ntype ComplexityRoot struct {\n{{ range $object := .Objects }}\n\t{{ if not $object.IsReserved -}}\n\t\t{{ $object.GQLType|toCamel }} struct {\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{ if not $field.IsReserved -}}\n\t\t\t\t{{ $field.GQLName|toCamel }} {{ $field.ComplexitySignature }}\n\t\t\t{{ end }}\n\t\t{{- end }}\n\t\t}\n\t{{- end }}\n{{ end }}\n}\n\n{{ range $object := .Objects -}}\n\t{{ if $object.HasResolvers }}\n\t\ttype {{$object.GQLType}}Resolver interface {\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{ $field.ShortResolverDeclaration }}\n\t\t{{ end }}\n\t\t}\n\t{{- end }}\n{{- end }}\n\n{{ range $object := .Objects -}}\n\t{{ range $field := $object.Fields -}}\n\t\t{{ if $field.Args }}\n\t\t\tfunc {{ $field.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {\n\t\t\t{{ template \"args.gotpl\" $field.Args }}\n\t\t\t}\n\t\t{{ end }}\n\t{{ end }}\n{{- end }}\n\n{{ range $directive := .Directives }}\n\t{{ if $directive.Args }}\n\t\tfunc {{ $directive.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {\n\t\t{{ template \"args.gotpl\" $directive.Args }}\n\t\t}\n\t{{ end }}\n{{ end }}\n\ntype executableSchema struct {\n\tresolvers ResolverRoot\n\tdirectives DirectiveRoot\n\tcomplexity ComplexityRoot\n}\n\nfunc (e *executableSchema) Schema() *ast.Schema {\n\treturn parsedSchema\n}\n\nfunc (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {\n\tswitch typeName + \".\" + field {\n\t{{ range $object := .Objects }}\n\t\t{{ if not $object.IsReserved }}\n\t\t\t{{ range $field := $object.Fields }}\n\t\t\t\t{{ if not $field.IsReserved }}\n\t\t\t\t\tcase \"{{$object.GQLType}}.{{$field.GQLName}}\":\n\t\t\t\t\t\tif e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}} == nil {\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{{ if $field.Args }}\n\t\t\t\t\t\t\targs, err := {{ $field.ArgsFunc }}(rawArgs)\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\treturn 0, false\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t{{ end }}\n\t\t\t\t\t\treturn e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{end}}), true\n\t\t\t\t{{ end }}\n\t\t\t{{ end }}\n\t\t{{ end }}\n\t{{ end }}\n\t}\n\treturn 0, false\n}\n\nfunc (e *executableSchema) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {\n\t{{- if .QueryRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.QueryRoot.GQLType}}(ctx, 
op.SelectionSet)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"queries are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {\n\t{{- if .MutationRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.MutationRoot.GQLType}}(ctx, op.SelectionSet)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"mutations are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {\n\t{{- if .SubscriptionRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e}\n\n\t\tnext := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.SelectionSet)\n\t\tif ec.Errors != nil {\n\t\t\treturn graphql.OneShot(&graphql.Response{Data: []byte(\"null\"), Errors: ec.Errors})\n\t\t}\n\n\t\tvar buf bytes.Buffer\n\t\treturn func() *graphql.Response {\n\t\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\t\tbuf.Reset()\n\t\t\t\tdata := next()\n\n\t\t\t\tif data == nil {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\tdata.MarshalGQL(&buf)\n\t\t\t\treturn buf.Bytes()\n\t\t\t})\n\n\t\t\tif buf == nil {\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\treturn &graphql.Response{\n\t\t\t\tData: buf,\n\t\t\t\tErrors: ec.Errors,\n\t\t\t}\n\t\t}\n\t{{- else }}\n\t\treturn graphql.OneShot(graphql.ErrorResponse(ctx, \"subscriptions are not supported\"))\n\t{{- end }}\n}\n\ntype executionContext struct {\n\t*graphql.RequestContext\n\t*executableSchema\n}\n\n{{- range $object := .Objects }}\n\t{{ template \"object.gotpl\" $object }}\n\n\t{{- range $field := $object.Fields }}\n\t\t{{ template \"field.gotpl\" $field }}\n\t{{ end }}\n{{- end}}\n\n{{- range $interface := .Interfaces }}\n\t{{ template \"interface.gotpl\" $interface }}\n{{- end }}\n\n{{- range $input := .Inputs }}\n\t{{ template \"input.gotpl\" $input }}\n{{- end }}\n\nfunc (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}, next graphql.Resolver) (ret interface{}) {\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\tec.Error(ctx, ec.Recover(ctx, r))\n\t\t\tret = nil\n\t\t}\n\t}()\n\t{{- if .Directives }}\n\trctx := graphql.GetResolverContext(ctx)\n\tfor _, d := range rctx.Field.Definition.Directives {\n\t\tswitch d.Name {\n\t\t{{- range $directive := .Directives }}\n\t\tcase \"{{$directive.Name}}\":\n\t\t\tif ec.directives.{{$directive.Name|ucFirst}} != nil {\n\t\t\t\t{{- if $directive.Args }}\n\t\t\t\t\trawArgs := d.ArgumentMap(ec.Variables)\n\t\t\t\t\targs, err := {{ $directive.ArgsFunc }}(rawArgs)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tec.Error(ctx, err)\n\t\t\t\t\t\treturn nil\n\t\t\t\t\t}\n\t\t\t\t{{- end }}\n\t\t\t\tn := next\n\t\t\t\tnext = func(ctx context.Context) (interface{}, error) {\n\t\t\t\t\treturn ec.directives.{{$directive.Name|ucFirst}}({{$directive.CallArgs}})\n\t\t\t\t}\n\t\t\t}\n\t\t{{- end }}\n\t\t}\n\t}\n\t{{- end }}\n\tres, err := ec.ResolverMiddleware(ctx, next)\n\tif err != nil {\n\t\tec.Error(ctx, err)\n\t\treturn nil\n\t}\n\treturn res\n}\n\nfunc (ec 
*executionContext) introspectSchema() *introspection.Schema {\n\treturn introspection.WrapSchema(parsedSchema)\n}\n\nfunc (ec *executionContext) introspectType(name string) *introspection.Type {\n\treturn introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name])\n}\n\nvar parsedSchema = gqlparser.MustLoadSchema(\n\t&ast.Source{Name: {{.SchemaFilename|quote}}, Input: {{.SchemaRaw|rawQuote}}},\n)\n",
+ "input.gotpl": "\t{{- if .IsMarshaled }}\n\tfunc Unmarshal{{ .GQLType }}(v interface{}) ({{.FullName}}, error) {\n\t\tvar it {{.FullName}}\n\t\tvar asMap = v.(map[string]interface{})\n\t\t{{ range $field := .Fields}}\n\t\t\t{{- if $field.Default}}\n\t\t\t\tif _, present := asMap[{{$field.GQLName|quote}}] ; !present {\n\t\t\t\t\tasMap[{{$field.GQLName|quote}}] = {{ $field.Default | dump }}\n\t\t\t\t}\n\t\t\t{{- end}}\n\t\t{{- end }}\n\n\t\tfor k, v := range asMap {\n\t\t\tswitch k {\n\t\t\t{{- range $field := .Fields }}\n\t\t\tcase {{$field.GQLName|quote}}:\n\t\t\t\tvar err error\n\t\t\t\t{{ $field.Unmarshal (print \"it.\" $field.GoFieldName) \"v\" }}\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn it, err\n\t\t\t\t}\n\t\t\t{{- end }}\n\t\t\t}\n\t\t}\n\n\t\treturn it, nil\n\t}\n\t{{- end }}\n",
+ "interface.gotpl": "{{- $interface := . }}\n\nfunc (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel ast.SelectionSet, obj *{{$interface.FullName}}) graphql.Marshaler {\n\tswitch obj := (*obj).(type) {\n\tcase nil:\n\t\treturn graphql.Null\n\t{{- range $implementor := $interface.Implementors }}\n\t\t{{- if $implementor.ValueReceiver }}\n\t\t\tcase {{$implementor.FullName}}:\n\t\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, &obj)\n\t\t{{- end}}\n\t\tcase *{{$implementor.FullName}}:\n\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, obj)\n\t{{- end }}\n\tdefault:\n\t\tpanic(fmt.Errorf(\"unexpected type %T\", obj))\n\t}\n}\n",
+ "models.gotpl": "// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\n{{ range $model := .Models }}\n\t{{with .Description}} {{.|prefixLines \"// \"}} {{end}}\n\t{{- if .IsInterface }}\n\t\ttype {{.GoType}} interface {}\n\t{{- else }}\n\t\ttype {{.GoType}} struct {\n\t\t\t{{- range $field := .Fields }}\n\t\t\t\t{{- with .Description}}\n\t\t\t\t\t{{.|prefixLines \"// \"}}\n\t\t\t\t{{- end}}\n\t\t\t\t{{- if $field.GoFieldName }}\n\t\t\t\t\t{{ $field.GoFieldName }} {{$field.Signature}} `json:\"{{$field.GQLName}}\"`\n\t\t\t\t{{- else }}\n\t\t\t\t\t{{ $field.GoFKName }} {{$field.GoFKType}}\n\t\t\t\t{{- end }}\n\t\t\t{{- end }}\n\t\t}\n\t{{- end }}\n{{- end}}\n\n{{ range $enum := .Enums }}\n\t{{with .Description}}{{.|prefixLines \"// \"}} {{end}}\n\ttype {{.GoType}} string\n\tconst (\n\t{{- range $value := .Values}}\n\t\t{{- with .Description}}\n\t\t\t{{.|prefixLines \"// \"}}\n\t\t{{- end}}\n\t\t{{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}\n\t{{- end }}\n\t)\n\n\tfunc (e {{.GoType}}) IsValid() bool {\n\t\tswitch e {\n\t\tcase {{ range $index, $element := .Values}}{{if $index}},{{end}}{{ $enum.GoType }}{{ $element.Name|toCamel }}{{end}}:\n\t\t\treturn true\n\t\t}\n\t\treturn false\n\t}\n\n\tfunc (e {{.GoType}}) String() string {\n\t\treturn string(e)\n\t}\n\n\tfunc (e *{{.GoType}}) UnmarshalGQL(v interface{}) error {\n\t\tstr, ok := v.(string)\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"enums must be strings\")\n\t\t}\n\n\t\t*e = {{.GoType}}(str)\n\t\tif !e.IsValid() {\n\t\t\treturn fmt.Errorf(\"%s is not a valid {{.GQLType}}\", str)\n\t\t}\n\t\treturn nil\n\t}\n\n\tfunc (e {{.GoType}}) MarshalGQL(w io.Writer) {\n\t\tfmt.Fprint(w, strconv.Quote(e.String()))\n\t}\n\n{{- end }}\n",
+ "object.gotpl": "{{ $object := . }}\n\nvar {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}\n\n// nolint: gocyclo, errcheck, gas, goconst\n{{- if .Stream }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet) func() graphql.Marshaler {\n\tfields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)\n\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\tObject: {{$object.GQLType|quote}},\n\t})\n\tif len(fields) != 1 {\n\t\tec.Errorf(ctx, \"must subscribe to exactly one stream\")\n\t\treturn nil\n\t}\n\n\tswitch fields[0].Name {\n\t{{- range $field := $object.Fields }}\n\tcase \"{{$field.GQLName}}\":\n\t\treturn ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, fields[0])\n\t{{- end }}\n\tdefault:\n\t\tpanic(\"unknown field \" + strconv.Quote(fields[0].Name))\n\t}\n}\n{{- else }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {\n\tfields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)\n\t{{if $object.Root}}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\tObject: {{$object.GQLType|quote}},\n\t\t})\n\t{{end}}\n\n\t{{if $object.IsConcurrent}} var wg sync.WaitGroup {{end}}\n\tout := graphql.NewOrderedMap(len(fields))\n\tinvalid := false\n\tfor i, field := range fields {\n\t\tout.Keys[i] = field.Alias\n\n\t\tswitch field.Name {\n\t\tcase \"__typename\":\n\t\t\tout.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})\n\t\t{{- range $field := $object.Fields }}\n\t\tcase \"{{$field.GQLName}}\":\n\t\t\t{{- if $field.IsConcurrent }}\n\t\t\t\twg.Add(1)\n\t\t\t\tgo func(i int, field graphql.CollectedField) {\n\t\t\t{{- end }}\n\t\t\t\tout.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})\n\t\t\t\t{{- if $field.ASTType.NonNull }}\n\t\t\t\t\tif out.Values[i] == graphql.Null {\n\t\t\t\t\t\tinvalid = true\n\t\t\t\t\t}\n\t\t\t\t{{- end }}\n\t\t\t{{- if $field.IsConcurrent }}\n\t\t\t\t\twg.Done()\n\t\t\t\t}(i, field)\n\t\t\t{{- end }}\n\t\t{{- end }}\n\t\tdefault:\n\t\t\tpanic(\"unknown field \" + strconv.Quote(field.Name))\n\t\t}\n\t}\n\t{{if $object.IsConcurrent}} wg.Wait() {{end}}\n\tif invalid { return graphql.Null }\n\treturn out\n}\n{{- end }}\n",
+ "resolver.gotpl": "//go:generate gorunpkg github.com/99designs/gqlgen\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\ntype {{.ResolverType}} struct {}\n\n{{ range $object := .Objects -}}\n\t{{- if $object.HasResolvers -}}\n\t\tfunc (r *{{$.ResolverType}}) {{$object.GQLType}}() {{ $object.ResolverInterface.FullName }} {\n\t\t\treturn &{{lcFirst $object.GQLType}}Resolver{r}\n\t\t}\n\t{{ end -}}\n{{ end }}\n\n{{ range $object := .Objects -}}\n\t{{- if $object.HasResolvers -}}\n\t\ttype {{lcFirst $object.GQLType}}Resolver struct { *Resolver }\n\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{- if $field.IsResolver -}}\n\t\t\tfunc (r *{{lcFirst $object.GQLType}}Resolver) {{ $field.ShortResolverDeclaration }} {\n\t\t\t\tpanic(\"not implemented\")\n\t\t\t}\n\t\t\t{{ end -}}\n\t\t{{ end -}}\n\t{{ end -}}\n{{ end }}\n",
+ "server.gotpl": "package main\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\nconst defaultPort = \"8080\"\n\nfunc main() {\n\tport := os.Getenv(\"PORT\")\n\tif port == \"\" {\n\t\tport = defaultPort\n\t}\n\n\thttp.Handle(\"/\", handler.Playground(\"GraphQL playground\", \"/query\"))\n\thttp.Handle(\"/query\", handler.GraphQL({{.ExecPackageName}}.NewExecutableSchema({{.ExecPackageName}}.Config{Resolvers: &{{.ResolverPackageName}}.Resolver{}})))\n\n\tlog.Printf(\"connect to http://localhost:%s/ for GraphQL playground\", port)\n\tlog.Fatal(http.ListenAndServe(\":\" + port, nil))\n}\n",
+}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/field.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/field.gotpl
new file mode 100644
index 00000000..b33f2123
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/field.gotpl
@@ -0,0 +1,74 @@
+{{ $field := . }}
+{{ $object := $field.Object }}
+
+{{- if $object.Stream }}
+ func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {
+ {{- if $field.Args }}
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := {{ $field.ArgsFunc }}(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return nil
+ }
+ {{- end }}
+ ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ Field: field,
+ })
+ results, err := ec.resolvers.{{ $field.ShortInvocation }}
+ if err != nil {
+ ec.Error(ctx, err)
+ return nil
+ }
+ return func() graphql.Marshaler {
+ res, ok := <-results
+ if !ok {
+ return nil
+ }
+ var out graphql.OrderedMap
+ out.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())
+ return &out
+ }
+ }
+{{ else }}
+ // nolint: vetshadow
+ func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {
+ {{- if $field.Args }}
+ rawArgs := field.ArgumentMap(ec.Variables)
+ args, err := {{ $field.ArgsFunc }}(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return graphql.Null
+ }
+ {{- end }}
+ rctx := &graphql.ResolverContext{
+ Object: {{$object.GQLType|quote}},
+ Args: {{if $field.Args }}args{{else}}nil{{end}},
+ Field: field,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
+ resTmp := ec.FieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, func(ctx context.Context) (interface{}, error) {
+ {{- if $field.IsResolver }}
+ return ec.resolvers.{{ $field.ShortInvocation }}
+ {{- else if $field.IsMethod }}
+ {{- if $field.NoErr }}
+ return {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }}), nil
+ {{- else }}
+ return {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }})
+ {{- end }}
+ {{- else if $field.IsVariable }}
+ return {{$field.GoReceiverName}}.{{$field.GoFieldName}}, nil
+ {{- end }}
+ })
+ if resTmp == nil {
+ {{- if $field.ASTType.NonNull }}
+ if !ec.HasError(rctx) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ {{- end }}
+ return graphql.Null
+ }
+ res := resTmp.({{$field.Signature}})
+ rctx.Result = res
+ {{ $field.WriteJson }}
+ }
+{{ end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/generated.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/generated.gotpl
new file mode 100644
index 00000000..8250bc7a
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/generated.gotpl
@@ -0,0 +1,263 @@
+// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
+
+package {{ .PackageName }}
+
+import (
+{{- range $import := .Imports }}
+ {{- $import.Write }}
+{{ end }}
+)
+
+// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
+func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
+ return &executableSchema{
+ resolvers: cfg.Resolvers,
+ directives: cfg.Directives,
+ complexity: cfg.Complexity,
+ }
+}
+
+type Config struct {
+ Resolvers ResolverRoot
+ Directives DirectiveRoot
+ Complexity ComplexityRoot
+}
+
+type ResolverRoot interface {
+{{- range $object := .Objects -}}
+ {{ if $object.HasResolvers -}}
+ {{$object.GQLType}}() {{$object.GQLType}}Resolver
+ {{ end }}
+{{- end }}
+}
+
+type DirectiveRoot struct {
+{{ range $directive := .Directives }}
+ {{ $directive.Declaration }}
+{{ end }}
+}
+
+type ComplexityRoot struct {
+{{ range $object := .Objects }}
+ {{ if not $object.IsReserved -}}
+ {{ $object.GQLType|toCamel }} struct {
+ {{ range $field := $object.Fields -}}
+ {{ if not $field.IsReserved -}}
+ {{ $field.GQLName|toCamel }} {{ $field.ComplexitySignature }}
+ {{ end }}
+ {{- end }}
+ }
+ {{- end }}
+{{ end }}
+}
+
+{{ range $object := .Objects -}}
+ {{ if $object.HasResolvers }}
+ type {{$object.GQLType}}Resolver interface {
+ {{ range $field := $object.Fields -}}
+ {{ $field.ShortResolverDeclaration }}
+ {{ end }}
+ }
+ {{- end }}
+{{- end }}
+
+{{ range $object := .Objects -}}
+ {{ range $field := $object.Fields -}}
+ {{ if $field.Args }}
+ func {{ $field.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ {{ template "args.gotpl" $field.Args }}
+ }
+ {{ end }}
+ {{ end }}
+{{- end }}
+
+{{ range $directive := .Directives }}
+ {{ if $directive.Args }}
+ func {{ $directive.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ {{ template "args.gotpl" $directive.Args }}
+ }
+ {{ end }}
+{{ end }}
+
+type executableSchema struct {
+ resolvers ResolverRoot
+ directives DirectiveRoot
+ complexity ComplexityRoot
+}
+
+func (e *executableSchema) Schema() *ast.Schema {
+ return parsedSchema
+}
+
+func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
+ switch typeName + "." + field {
+ {{ range $object := .Objects }}
+ {{ if not $object.IsReserved }}
+ {{ range $field := $object.Fields }}
+ {{ if not $field.IsReserved }}
+ case "{{$object.GQLType}}.{{$field.GQLName}}":
+ if e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}} == nil {
+ break
+ }
+ {{ if $field.Args }}
+ args, err := {{ $field.ArgsFunc }}(rawArgs)
+ if err != nil {
+ return 0, false
+ }
+ {{ end }}
+ return e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{end}}), true
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ }
+ return 0, false
+}
+
+func (e *executableSchema) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
+ {{- if .QueryRoot }}
+ ec := executionContext{graphql.GetRequestContext(ctx), e}
+
+ buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
+ data := ec._{{.QueryRoot.GQLType}}(ctx, op.SelectionSet)
+ var buf bytes.Buffer
+ data.MarshalGQL(&buf)
+ return buf.Bytes()
+ })
+
+ return &graphql.Response{
+ Data: buf,
+ Errors: ec.Errors,
+ }
+ {{- else }}
+ return graphql.ErrorResponse(ctx, "queries are not supported")
+ {{- end }}
+}
+
+func (e *executableSchema) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
+ {{- if .MutationRoot }}
+ ec := executionContext{graphql.GetRequestContext(ctx), e}
+
+ buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
+ data := ec._{{.MutationRoot.GQLType}}(ctx, op.SelectionSet)
+ var buf bytes.Buffer
+ data.MarshalGQL(&buf)
+ return buf.Bytes()
+ })
+
+ return &graphql.Response{
+ Data: buf,
+ Errors: ec.Errors,
+ }
+ {{- else }}
+ return graphql.ErrorResponse(ctx, "mutations are not supported")
+ {{- end }}
+}
+
+func (e *executableSchema) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {
+ {{- if .SubscriptionRoot }}
+ ec := executionContext{graphql.GetRequestContext(ctx), e}
+
+ next := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.SelectionSet)
+ if ec.Errors != nil {
+ return graphql.OneShot(&graphql.Response{Data: []byte("null"), Errors: ec.Errors})
+ }
+
+ var buf bytes.Buffer
+ return func() *graphql.Response {
+ buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
+ buf.Reset()
+ data := next()
+
+ if data == nil {
+ return nil
+ }
+ data.MarshalGQL(&buf)
+ return buf.Bytes()
+ })
+
+ if buf == nil {
+ return nil
+ }
+
+ return &graphql.Response{
+ Data: buf,
+ Errors: ec.Errors,
+ }
+ }
+ {{- else }}
+ return graphql.OneShot(graphql.ErrorResponse(ctx, "subscriptions are not supported"))
+ {{- end }}
+}
+
+type executionContext struct {
+ *graphql.RequestContext
+ *executableSchema
+}
+
+{{- range $object := .Objects }}
+ {{ template "object.gotpl" $object }}
+
+ {{- range $field := $object.Fields }}
+ {{ template "field.gotpl" $field }}
+ {{ end }}
+{{- end}}
+
+{{- range $interface := .Interfaces }}
+ {{ template "interface.gotpl" $interface }}
+{{- end }}
+
+{{- range $input := .Inputs }}
+ {{ template "input.gotpl" $input }}
+{{- end }}
+
+func (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}, next graphql.Resolver) (ret interface{}) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ {{- if .Directives }}
+ rctx := graphql.GetResolverContext(ctx)
+ for _, d := range rctx.Field.Definition.Directives {
+ switch d.Name {
+ {{- range $directive := .Directives }}
+ case "{{$directive.Name}}":
+ if ec.directives.{{$directive.Name|ucFirst}} != nil {
+ {{- if $directive.Args }}
+ rawArgs := d.ArgumentMap(ec.Variables)
+ args, err := {{ $directive.ArgsFunc }}(rawArgs)
+ if err != nil {
+ ec.Error(ctx, err)
+ return nil
+ }
+ {{- end }}
+ n := next
+ next = func(ctx context.Context) (interface{}, error) {
+ return ec.directives.{{$directive.Name|ucFirst}}({{$directive.CallArgs}})
+ }
+ }
+ {{- end }}
+ }
+ }
+ {{- end }}
+ res, err := ec.ResolverMiddleware(ctx, next)
+ if err != nil {
+ ec.Error(ctx, err)
+ return nil
+ }
+ return res
+}
+
+func (ec *executionContext) introspectSchema() *introspection.Schema {
+ return introspection.WrapSchema(parsedSchema)
+}
+
+func (ec *executionContext) introspectType(name string) *introspection.Type {
+ return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name])
+}
+
+var parsedSchema = gqlparser.MustLoadSchema(
+ &ast.Source{Name: {{.SchemaFilename|quote}}, Input: {{.SchemaRaw|rawQuote}}},
+)
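
The generated Config above is the single wiring point for resolvers, directive implementations and per-field complexity costs. A rough, hypothetical sketch of how an application could fill it in (the package path, the location of the Resolver type and the Query.AllBugs field are illustrative placeholders, not taken from this repository):

package main

import (
	"log"
	"net/http"

	"github.com/99designs/gqlgen/handler"

	gen "example.com/myapp/graph" // hypothetical package holding the generated code
)

func main() {
	cfg := gen.Config{Resolvers: &gen.Resolver{}}

	// Optional: override the cost of one field. Fields left nil fall back to
	// the default of 1 + child complexity (see complexity.fieldComplexity below).
	cfg.Complexity.Query.AllBugs = func(childComplexity int) int {
		return 10 * childComplexity
	}

	http.Handle("/query", handler.GraphQL(gen.NewExecutableSchema(cfg)))
	log.Fatal(http.ListenAndServe(":8080", nil))
}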
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/input.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/input.gotpl
index 6073daf4..f543608d 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/input.gotpl
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/input.gotpl
@@ -15,7 +15,7 @@
{{- range $field := .Fields }}
case {{$field.GQLName|quote}}:
var err error
- {{ $field.Unmarshal (print "it." $field.GoVarName) "v" }}
+ {{ $field.Unmarshal (print "it." $field.GoFieldName) "v" }}
if err != nil {
return it, err
}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/interface.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/interface.gotpl
index 817d0abe..84cbe500 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/interface.gotpl
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/interface.gotpl
@@ -1,6 +1,6 @@
{{- $interface := . }}
-func (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel []query.Selection, obj *{{$interface.FullName}}) graphql.Marshaler {
+func (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel ast.SelectionSet, obj *{{$interface.FullName}}) graphql.Marshaler {
switch obj := (*obj).(type) {
case nil:
return graphql.Null
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/models.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/models.gotpl
index e66266a5..7427d71d 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/models.gotpl
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/models.gotpl
@@ -1,4 +1,4 @@
-// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.
+// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
package {{ .PackageName }}
@@ -9,13 +9,17 @@ import (
)
{{ range $model := .Models }}
+ {{with .Description}} {{.|prefixLines "// "}} {{end}}
{{- if .IsInterface }}
type {{.GoType}} interface {}
{{- else }}
type {{.GoType}} struct {
{{- range $field := .Fields }}
- {{- if $field.GoVarName }}
- {{ $field.GoVarName }} {{$field.Signature}} `json:"{{$field.GQLName}}"`
+ {{- with .Description}}
+ {{.|prefixLines "// "}}
+ {{- end}}
+ {{- if $field.GoFieldName }}
+ {{ $field.GoFieldName }} {{$field.Signature}} `json:"{{$field.GQLName}}"`
{{- else }}
{{ $field.GoFKName }} {{$field.GoFKType}}
{{- end }}
@@ -25,10 +29,13 @@ import (
{{- end}}
{{ range $enum := .Enums }}
+ {{with .Description}}{{.|prefixLines "// "}} {{end}}
type {{.GoType}} string
const (
- {{ range $value := .Values -}}
- {{with .Description}} {{.|prefixLines "// "}} {{end}}
+ {{- range $value := .Values}}
+ {{- with .Description}}
+ {{.|prefixLines "// "}}
+ {{- end}}
{{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}
{{- end }}
)
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/object.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/object.gotpl
index b531d5fe..e98cbe1e 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/object.gotpl
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/object.gotpl
@@ -4,8 +4,8 @@ var {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}
// nolint: gocyclo, errcheck, gas, goconst
{{- if .Stream }}
-func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection) func() graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)
+func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet) func() graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
Object: {{$object.GQLType|quote}},
})
@@ -24,14 +24,17 @@ func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []quer
}
}
{{- else }}
-func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {
- fields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)
+func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)
{{if $object.Root}}
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
Object: {{$object.GQLType|quote}},
})
{{end}}
+
+ {{if $object.IsConcurrent}} var wg sync.WaitGroup {{end}}
out := graphql.NewOrderedMap(len(fields))
+ invalid := false
for i, field := range fields {
out.Keys[i] = field.Alias
@@ -40,13 +43,27 @@ func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []quer
out.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})
{{- range $field := $object.Fields }}
case "{{$field.GQLName}}":
- out.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})
+ {{- if $field.IsConcurrent }}
+ wg.Add(1)
+ go func(i int, field graphql.CollectedField) {
+ {{- end }}
+ out.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})
+ {{- if $field.ASTType.NonNull }}
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ {{- end }}
+ {{- if $field.IsConcurrent }}
+ wg.Done()
+ }(i, field)
+ {{- end }}
{{- end }}
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
-
+ {{if $object.IsConcurrent}} wg.Wait() {{end}}
+ if invalid { return graphql.Null }
return out
}
{{- end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/resolver.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/resolver.gotpl
new file mode 100644
index 00000000..dd8acf24
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/resolver.gotpl
@@ -0,0 +1,33 @@
+//go:generate gorunpkg github.com/99designs/gqlgen
+
+package {{ .PackageName }}
+
+import (
+{{- range $import := .Imports }}
+ {{- $import.Write }}
+{{ end }}
+)
+
+type {{.ResolverType}} struct {}
+
+{{ range $object := .Objects -}}
+ {{- if $object.HasResolvers -}}
+ func (r *{{$.ResolverType}}) {{$object.GQLType}}() {{ $object.ResolverInterface.FullName }} {
+ return &{{lcFirst $object.GQLType}}Resolver{r}
+ }
+ {{ end -}}
+{{ end }}
+
+{{ range $object := .Objects -}}
+ {{- if $object.HasResolvers -}}
+ type {{lcFirst $object.GQLType}}Resolver struct { *Resolver }
+
+ {{ range $field := $object.Fields -}}
+ {{- if $field.IsResolver -}}
+ func (r *{{lcFirst $object.GQLType}}Resolver) {{ $field.ShortResolverDeclaration }} {
+ panic("not implemented")
+ }
+ {{ end -}}
+ {{ end -}}
+ {{ end -}}
+{{ end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/server.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/server.gotpl
new file mode 100644
index 00000000..f23b30e1
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/server.gotpl
@@ -0,0 +1,22 @@
+package main
+
+import (
+{{- range $import := .Imports }}
+ {{- $import.Write }}
+{{ end }}
+)
+
+const defaultPort = "8080"
+
+func main() {
+ port := os.Getenv("PORT")
+ if port == "" {
+ port = defaultPort
+ }
+
+ http.Handle("/", handler.Playground("GraphQL playground", "/query"))
+ http.Handle("/query", handler.GraphQL({{.ExecPackageName}}.NewExecutableSchema({{.ExecPackageName}}.Config{Resolvers: &{{.ResolverPackageName}}.Resolver{}})))
+
+ log.Printf("connect to http://localhost:%s/ for GraphQL playground", port)
+ log.Fatal(http.ListenAndServe(":" + port, nil))
+}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/templates.go b/vendor/github.com/99designs/gqlgen/codegen/templates/templates.go
index 3d29b403..df909cb5 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/templates.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/templates.go
@@ -5,11 +5,19 @@ package templates
import (
"bytes"
"fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
"sort"
"strconv"
"strings"
"text/template"
"unicode"
+
+ "log"
+
+ "github.com/pkg/errors"
+ "golang.org/x/tools/imports"
)
func Run(name string, tpldata interface{}) (*bytes.Buffer, error) {
@@ -96,6 +104,8 @@ func dump(val interface{}) string {
switch val := val.(type) {
case int:
return strconv.Itoa(val)
+ case int64:
+ return fmt.Sprintf("%d", val)
case float64:
return fmt.Sprintf("%f", val)
case string:
@@ -137,3 +147,47 @@ func dump(val interface{}) string {
func prefixLines(prefix, s string) string {
return prefix + strings.Replace(s, "\n", "\n"+prefix, -1)
}
+
+func RenderToFile(tpl string, filename string, data interface{}) error {
+ var buf *bytes.Buffer
+ buf, err := Run(tpl, data)
+ if err != nil {
+ return errors.Wrap(err, filename+" generation failed")
+ }
+
+ if err := write(filename, buf.Bytes()); err != nil {
+ return err
+ }
+
+ log.Println(filename)
+
+ return nil
+}
+
+func gofmt(filename string, b []byte) ([]byte, error) {
+ out, err := imports.Process(filename, b, nil)
+ if err != nil {
+ return b, errors.Wrap(err, "unable to gofmt")
+ }
+ return out, nil
+}
+
+func write(filename string, b []byte) error {
+ err := os.MkdirAll(filepath.Dir(filename), 0755)
+ if err != nil {
+ return errors.Wrap(err, "failed to create directory")
+ }
+
+ formatted, err := gofmt(filename, b)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "gofmt failed: %s\n", err.Error())
+ formatted = b
+ }
+
+ err = ioutil.WriteFile(filename, formatted, 0644)
+ if err != nil {
+ return errors.Wrapf(err, "failed to write %s", filename)
+ }
+
+ return nil
+}
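
The new write helper deliberately keeps going when goimports rejects the generated source: the raw bytes are written anyway so the broken output can be inspected. A standalone sketch of that fallback pattern, using the same imports.Process call (file names here are arbitrary):

package main

import (
	"fmt"
	"io/ioutil"
	"os"

	"golang.org/x/tools/imports"
)

// writeFormatted mirrors templates.write: format if possible, otherwise fall
// back to the unformatted source so the failure can be inspected.
func writeFormatted(filename string, src []byte) error {
	formatted, err := imports.Process(filename, src, nil)
	if err != nil {
		fmt.Fprintf(os.Stderr, "gofmt failed: %s\n", err)
		formatted = src
	}
	return ioutil.WriteFile(filename, formatted, 0644)
}

func main() {
	// The fmt import is intentionally missing; imports.Process adds it.
	src := []byte("package demo\n\nfunc Hello() { fmt.Println(\"hi\") }\n")
	if err := writeFormatted("demo_gen.go", src); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}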
diff --git a/vendor/github.com/vektah/gqlgen/codegen/type.go b/vendor/github.com/99designs/gqlgen/codegen/type.go
index 7af24b3c..8c53fe55 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/type.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/type.go
@@ -3,6 +3,8 @@ package codegen
import (
"strconv"
"strings"
+
+ "github.com/vektah/gqlparser/ast"
)
type NamedTypes map[string]*NamedType
@@ -26,8 +28,9 @@ type Ref struct {
type Type struct {
*NamedType
- Modifiers []string
- CastType *Ref // the type to cast to when unmarshalling
+ Modifiers []string
+ ASTType *ast.Type
+ AliasedType *Ref
}
const (
@@ -47,6 +50,9 @@ func (t Ref) PkgDot() string {
}
func (t Type) Signature() string {
+ if t.AliasedType != nil {
+ return strings.Join(t.Modifiers, "") + t.AliasedType.FullName()
+ }
return strings.Join(t.Modifiers, "") + t.FullName()
}
@@ -109,6 +115,8 @@ func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) s
if {{.raw}} != nil {
if tmp1, ok := {{.raw}}.([]interface{}); ok {
{{.rawSlice}} = tmp1
+ } else {
+ {{.rawSlice}} = []interface{}{ {{.raw}} }
}
}
{{.result}} = make({{.type}}, len({{.rawSlice}}))
@@ -125,11 +133,11 @@ func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) s
}
realResult := result
- if t.CastType != nil {
+ if t.AliasedType != nil {
result = "castTmp"
}
- return tpl(`{{- if .t.CastType }}
+ return tpl(`{{- if .t.AliasedType }}
var castTmp {{.t.FullName}}
{{ end }}
{{- if eq .t.GoType "map[string]interface{}" }}
@@ -139,8 +147,8 @@ func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) s
{{- else -}}
err = (&{{.result}}).UnmarshalGQL({{.raw}})
{{- end }}
- {{- if .t.CastType }}
- {{ .realResult }} = {{.t.CastType.FullName}}(castTmp)
+ {{- if .t.AliasedType }}
+ {{ .realResult }} = {{.t.AliasedType.FullName}}(castTmp)
{{- end }}`, map[string]interface{}{
"realResult": realResult,
"result": result,
@@ -150,7 +158,7 @@ func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) s
}
func (t Type) Marshal(val string) string {
- if t.CastType != nil {
+ if t.AliasedType != nil {
val = t.GoType + "(" + val + ")"
}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/type_build.go b/vendor/github.com/99designs/gqlgen/codegen/type_build.go
index ba2874b0..f0ec6785 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/type_build.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/type_build.go
@@ -1,12 +1,10 @@
package codegen
import (
- "fmt"
"go/types"
"strings"
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/schema"
+ "github.com/vektah/gqlparser/ast"
"golang.org/x/tools/go/loader"
)
@@ -20,7 +18,7 @@ func (cfg *Config) buildNamedTypes() NamedTypes {
t.IsUserDefined = true
t.Package, t.GoType = pkgAndType(userEntry.Model)
} else if t.IsScalar {
- t.Package = "github.com/vektah/gqlgen/graphql"
+ t.Package = "github.com/99designs/gqlgen/graphql"
t.GoType = "String"
}
@@ -50,16 +48,16 @@ func (cfg *Config) bindTypes(imports *Imports, namedTypes NamedTypes, destDir st
// namedTypeFromSchema objects for every graphql type, including primitives.
// don't recurse into object fields or interfaces yet, lets make sure we have collected everything first.
-func namedTypeFromSchema(schemaType schema.NamedType) *NamedType {
- switch val := schemaType.(type) {
- case *schema.Scalar, *schema.Enum:
- return &NamedType{GQLType: val.TypeName(), IsScalar: true}
- case *schema.Interface, *schema.Union:
- return &NamedType{GQLType: val.TypeName(), IsInterface: true}
- case *schema.InputObject:
- return &NamedType{GQLType: val.TypeName(), IsInput: true}
+func namedTypeFromSchema(schemaType *ast.Definition) *NamedType {
+ switch schemaType.Kind {
+ case ast.Scalar, ast.Enum:
+ return &NamedType{GQLType: schemaType.Name, IsScalar: true}
+ case ast.Interface, ast.Union:
+ return &NamedType{GQLType: schemaType.Name, IsInterface: true}
+ case ast.InputObject:
+ return &NamedType{GQLType: schemaType.Name, IsInput: true}
default:
- return &NamedType{GQLType: val.TypeName()}
+ return &NamedType{GQLType: schemaType.Name}
}
}
@@ -73,40 +71,31 @@ func pkgAndType(name string) (string, string) {
return normalizeVendor(strings.Join(parts[:len(parts)-1], ".")), parts[len(parts)-1]
}
-func (n NamedTypes) getType(t common.Type) *Type {
+func (n NamedTypes) getType(t *ast.Type) *Type {
+ orig := t
var modifiers []string
- usePtr := true
for {
- if _, nonNull := t.(*common.NonNull); nonNull {
- usePtr = false
- } else if _, nonNull := t.(*common.List); nonNull {
- usePtr = true
+ if t.Elem != nil {
+ modifiers = append(modifiers, modList)
+ t = t.Elem
} else {
- if usePtr {
+ if !t.NonNull {
modifiers = append(modifiers, modPtr)
}
- usePtr = true
- }
-
- switch val := t.(type) {
- case *common.NonNull:
- t = val.OfType
- case *common.List:
- modifiers = append(modifiers, modList)
- t = val.OfType
- case schema.NamedType:
- t := &Type{
- NamedType: n[val.TypeName()],
+ if n[t.NamedType] == nil {
+ panic("missing type " + t.NamedType)
+ }
+ res := &Type{
+ NamedType: n[t.NamedType],
Modifiers: modifiers,
+ ASTType: orig,
}
- if t.IsInterface {
- t.StripPtr()
+ if res.IsInterface {
+ res.StripPtr()
}
- return t
- default:
- panic(fmt.Errorf("unknown type %T", t))
+ return res
}
}
}
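
getType now walks gqlparser's *ast.Type directly: every list level contributes a slice modifier, and a pointer is added only when the terminal named type is nullable (interfaces additionally get the pointer stripped). A simplified standalone sketch of that mapping, with a local struct standing in for ast.Type and the Go type name passed in directly:

package main

import (
	"fmt"
	"strings"
)

// gqlType is a stripped-down stand-in for *ast.Type: a list when Elem is set,
// otherwise a named type, plus the NonNull flag.
type gqlType struct {
	NamedType string
	Elem      *gqlType
	NonNull   bool
}

// goSignature mirrors the modifier logic of codegen.getType: "[]" per list
// level, "*" only for a nullable terminal type.
func goSignature(t *gqlType, goName string) string {
	var mods []string
	for {
		if t.Elem != nil {
			mods = append(mods, "[]")
			t = t.Elem
			continue
		}
		if !t.NonNull {
			mods = append(mods, "*")
		}
		return strings.Join(mods, "") + goName
	}
}

func main() {
	fmt.Println(goSignature(&gqlType{NamedType: "Bug", NonNull: true}, "Bug")) // Bug
	fmt.Println(goSignature(&gqlType{NamedType: "Bug"}, "Bug"))                // *Bug
	fmt.Println(goSignature(&gqlType{Elem: &gqlType{NamedType: "Bug", NonNull: true}, NonNull: true}, "Bug")) // []Bug
	fmt.Println(goSignature(&gqlType{Elem: &gqlType{NamedType: "Bug"}}, "Bug")) // []*Bug
}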
diff --git a/vendor/github.com/vektah/gqlgen/codegen/util.go b/vendor/github.com/99designs/gqlgen/codegen/util.go
index 5ff41074..1849f100 100644
--- a/vendor/github.com/vektah/gqlgen/codegen/util.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/util.go
@@ -3,6 +3,7 @@ package codegen
import (
"fmt"
"go/types"
+ "reflect"
"regexp"
"strings"
@@ -104,19 +105,50 @@ func findMethod(typ *types.Named, name string) *types.Func {
return nil
}
-func findField(typ *types.Struct, name string) *types.Var {
+// findField attempts to match the name to a struct field with the following
+// priorites:
+// 1. If struct tag is passed then struct tag has highest priority
+// 2. Field in an embedded struct
+// 3. Actual Field name
+func findField(typ *types.Struct, name, structTag string) (*types.Var, error) {
+ var foundField *types.Var
+ foundFieldWasTag := false
+
for i := 0; i < typ.NumFields(); i++ {
field := typ.Field(i)
+
+ if structTag != "" {
+ tags := reflect.StructTag(typ.Tag(i))
+ if val, ok := tags.Lookup(structTag); ok {
+ if strings.EqualFold(val, name) {
+ if foundField != nil && foundFieldWasTag {
+ return nil, errors.Errorf("tag %s is ambigious; multiple fields have the same tag value of %s", structTag, val)
+ }
+
+ foundField = field
+ foundFieldWasTag = true
+ }
+ }
+ }
+
if field.Anonymous() {
if named, ok := field.Type().(*types.Struct); ok {
- if f := findField(named, name); f != nil {
- return f
+ f, err := findField(named, name, structTag)
+ if err != nil && !strings.HasPrefix(err.Error(), "no field named") {
+ return nil, err
+ }
+ if f != nil && foundField == nil {
+ foundField = f
}
}
if named, ok := field.Type().Underlying().(*types.Struct); ok {
- if f := findField(named, name); f != nil {
- return f
+ f, err := findField(named, name, structTag)
+ if err != nil && !strings.HasPrefix(err.Error(), "no field named") {
+ return nil, err
+ }
+ if f != nil && foundField == nil {
+ foundField = f
}
}
}
@@ -125,11 +157,16 @@ func findField(typ *types.Struct, name string) *types.Var {
continue
}
- if strings.EqualFold(field.Name(), name) {
- return field
+ if strings.EqualFold(field.Name(), name) && foundField == nil {
+ foundField = field
}
}
- return nil
+
+ if foundField == nil {
+ return nil, fmt.Errorf("no field named %s", name)
+ }
+
+ return foundField, nil
}
type BindError struct {
@@ -161,11 +198,15 @@ func (b BindErrors) Error() string {
return strings.Join(errs, "\n\n")
}
-func bindObject(t types.Type, object *Object, imports *Imports) BindErrors {
+func bindObject(t types.Type, object *Object, imports *Imports, structTag string) BindErrors {
var errs BindErrors
for i := range object.Fields {
field := &object.Fields[i]
+ if field.ForceResolver {
+ continue
+ }
+
// first try binding to a method
methodErr := bindMethod(imports, t, field)
if methodErr == nil {
@@ -173,7 +214,7 @@ func bindObject(t types.Type, object *Object, imports *Imports) BindErrors {
}
// otherwise try binding to a var
- varErr := bindVar(imports, t, field)
+ varErr := bindVar(imports, t, field, structTag)
if varErr != nil {
errs = append(errs, BindError{
@@ -194,7 +235,11 @@ func bindMethod(imports *Imports, t types.Type, field *Field) error {
return fmt.Errorf("not a named type")
}
- method := findMethod(namedType, field.GQLName)
+ goName := field.GQLName
+ if field.GoFieldName != "" {
+ goName = field.GoFieldName
+ }
+ method := findMethod(namedType, goName)
if method == nil {
return fmt.Errorf("no method named %s", field.GQLName)
}
@@ -216,20 +261,26 @@ func bindMethod(imports *Imports, t types.Type, field *Field) error {
}
// success, args and return type match. Bind to method
- field.GoMethodName = "obj." + method.Name()
+ field.GoFieldType = GoFieldMethod
+ field.GoReceiverName = "obj"
+ field.GoFieldName = method.Name()
field.Args = newArgs
return nil
}
-func bindVar(imports *Imports, t types.Type, field *Field) error {
+func bindVar(imports *Imports, t types.Type, field *Field, structTag string) error {
underlying, ok := t.Underlying().(*types.Struct)
if !ok {
return fmt.Errorf("not a struct")
}
- structField := findField(underlying, field.GQLName)
- if structField == nil {
- return fmt.Errorf("no field named %s", field.GQLName)
+ goName := field.GQLName
+ if field.GoFieldName != "" {
+ goName = field.GoFieldName
+ }
+ structField, err := findField(underlying, goName, structTag)
+ if err != nil {
+ return err
}
if err := validateTypeBinding(imports, field, structField.Type()); err != nil {
@@ -237,7 +288,9 @@ func bindVar(imports *Imports, t types.Type, field *Field) error {
}
// success, bind to var
- field.GoVarName = structField.Name()
+ field.GoFieldType = GoFieldVariable
+ field.GoReceiverName = "obj"
+ field.GoFieldName = structField.Name()
return nil
}
@@ -249,7 +302,9 @@ nextArg:
param := params.At(j)
for _, oldArg := range field.Args {
if strings.EqualFold(oldArg.GQLName, param.Name()) {
- oldArg.Type.Modifiers = modifiersFromGoType(param.Type())
+ if !field.ForceResolver {
+ oldArg.Type.Modifiers = modifiersFromGoType(param.Type())
+ }
newArgs = append(newArgs, oldArg)
continue nextArg
}
@@ -276,7 +331,7 @@ func validateTypeBinding(imports *Imports, field *Field, goType types.Type) erro
field.Type.Modifiers = modifiersFromGoType(goType)
pkg, typ := pkgAndType(goType.String())
imp := imports.findByPath(pkg)
- field.CastType = &Ref{GoType: typ, Import: imp}
+ field.AliasedType = &Ref{GoType: typ, Import: imp}
return nil
}
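
The new findField resolves a GraphQL name against a struct in the documented order: an explicit struct tag wins, then fields reached through embedded structs, then a case-insensitive match on the field name itself, with an error if two fields carry the same tag value. A simplified, reflect-based illustration of that precedence (the real code works on go/types during code generation; the graphql tag name and the Bug/Base types are made up for the example):

package main

import (
	"fmt"
	"reflect"
	"strings"
)

type Base struct {
	Name string // would match "name" by field name alone
}

type Bug struct {
	Base
	Title string `graphql:"name"` // the tag match takes priority over Base.Name
}

// fieldFor mimics the precedence documented on codegen.findField:
// struct tag first, then embedded fields, then the field name.
func fieldFor(t reflect.Type, name, tagName string) (reflect.StructField, bool) {
	for i := 0; i < t.NumField(); i++ { // 1. explicit tag
		f := t.Field(i)
		if val, ok := f.Tag.Lookup(tagName); ok && strings.EqualFold(val, name) {
			return f, true
		}
	}
	for i := 0; i < t.NumField(); i++ { // 2. embedded structs
		f := t.Field(i)
		if f.Anonymous && f.Type.Kind() == reflect.Struct {
			if sub, ok := fieldFor(f.Type, name, tagName); ok {
				return sub, true
			}
		}
	}
	for i := 0; i < t.NumField(); i++ { // 3. case-insensitive field name
		if f := t.Field(i); strings.EqualFold(f.Name, name) {
			return f, true
		}
	}
	return reflect.StructField{}, false
}

func main() {
	f, _ := fieldFor(reflect.TypeOf(Bug{}), "name", "graphql")
	fmt.Println(f.Name) // Title
}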
diff --git a/vendor/github.com/99designs/gqlgen/complexity/complexity.go b/vendor/github.com/99designs/gqlgen/complexity/complexity.go
new file mode 100644
index 00000000..d5b46bf4
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/complexity/complexity.go
@@ -0,0 +1,104 @@
+package complexity
+
+import (
+ "github.com/99designs/gqlgen/graphql"
+ "github.com/vektah/gqlparser/ast"
+)
+
+func Calculate(es graphql.ExecutableSchema, op *ast.OperationDefinition, vars map[string]interface{}) int {
+ walker := complexityWalker{
+ es: es,
+ schema: es.Schema(),
+ vars: vars,
+ }
+ return walker.selectionSetComplexity(op.SelectionSet)
+}
+
+type complexityWalker struct {
+ es graphql.ExecutableSchema
+ schema *ast.Schema
+ vars map[string]interface{}
+}
+
+func (cw complexityWalker) selectionSetComplexity(selectionSet ast.SelectionSet) int {
+ var complexity int
+ for _, selection := range selectionSet {
+ switch s := selection.(type) {
+ case *ast.Field:
+ fieldDefinition := cw.schema.Types[s.Definition.Type.Name()]
+ var childComplexity int
+ switch fieldDefinition.Kind {
+ case ast.Object, ast.Interface, ast.Union:
+ childComplexity = cw.selectionSetComplexity(s.SelectionSet)
+ }
+
+ args := s.ArgumentMap(cw.vars)
+ var fieldComplexity int
+ if s.ObjectDefinition.Kind == ast.Interface {
+ fieldComplexity = cw.interfaceFieldComplexity(s.ObjectDefinition, s.Name, childComplexity, args)
+ } else {
+ fieldComplexity = cw.fieldComplexity(s.ObjectDefinition.Name, s.Name, childComplexity, args)
+ }
+ complexity = safeAdd(complexity, fieldComplexity)
+
+ case *ast.FragmentSpread:
+ complexity = safeAdd(complexity, cw.selectionSetComplexity(s.Definition.SelectionSet))
+
+ case *ast.InlineFragment:
+ complexity = safeAdd(complexity, cw.selectionSetComplexity(s.SelectionSet))
+ }
+ }
+ return complexity
+}
+
+func (cw complexityWalker) interfaceFieldComplexity(def *ast.Definition, field string, childComplexity int, args map[string]interface{}) int {
+ // Interfaces don't have their own separate field costs, so they have to assume the worst case.
+ // We iterate over all implementors and choose the most expensive one.
+ maxComplexity := 0
+ implementors := cw.schema.GetPossibleTypes(def)
+ for _, t := range implementors {
+ fieldComplexity := cw.fieldComplexity(t.Name, field, childComplexity, args)
+ if fieldComplexity > maxComplexity {
+ maxComplexity = fieldComplexity
+ }
+ }
+ return maxComplexity
+}
+
+func (cw complexityWalker) fieldComplexity(object, field string, childComplexity int, args map[string]interface{}) int {
+ if customComplexity, ok := cw.es.Complexity(object, field, childComplexity, args); ok && customComplexity >= childComplexity {
+ return customComplexity
+ }
+ // default complexity calculation
+ return safeAdd(1, childComplexity)
+}
+
+const maxInt = int(^uint(0) >> 1)
+
+// safeAdd is a saturating add of a and b that ignores negative operands.
+// If a + b would overflow through normal Go addition,
+// it returns the maximum integer value instead.
+//
+// Adding complexities with this function prevents attackers from intentionally
+// overflowing the complexity calculation to allow overly-complex queries.
+//
+// It also helps mitigate the impact of custom complexities that accidentally
+// return negative values.
+func safeAdd(a, b int) int {
+ // Ignore negative operands.
+ if a < 0 {
+ if b < 0 {
+ return 1
+ }
+ return b
+ } else if b < 0 {
+ return a
+ }
+
+ c := a + b
+ if c < a {
+ // Set c to maximum integer instead of overflowing.
+ c = maxInt
+ }
+ return c
+}
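
safeAdd is what keeps the complexity estimate from being gamed: negative custom costs are ignored and the sum saturates at the maximum int instead of wrapping around. A standalone copy of the same behaviour, small enough to run on its own:

package main

import "fmt"

const maxInt = int(^uint(0) >> 1)

// safeAdd mirrors complexity.safeAdd: drop negative operands and saturate
// at maxInt instead of overflowing.
func safeAdd(a, b int) int {
	if a < 0 {
		if b < 0 {
			return 1
		}
		return b
	}
	if b < 0 {
		return a
	}
	if c := a + b; c >= a {
		return c
	}
	return maxInt
}

func main() {
	fmt.Println(safeAdd(3, 4))      // 7
	fmt.Println(safeAdd(-10, 4))    // 4: negative operands are ignored
	fmt.Println(safeAdd(maxInt, 1)) // maxInt: saturates instead of wrapping
}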
diff --git a/vendor/github.com/vektah/gqlgen/graphql/bool.go b/vendor/github.com/99designs/gqlgen/graphql/bool.go
index 7053bbca..7053bbca 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/bool.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/bool.go
diff --git a/vendor/github.com/vektah/gqlgen/graphql/context.go b/vendor/github.com/99designs/gqlgen/graphql/context.go
index 8f544100..6baee83c 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/context.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/context.go
@@ -5,45 +5,52 @@ import (
"fmt"
"sync"
- "github.com/vektah/gqlgen/neelance/query"
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
)
type Resolver func(ctx context.Context) (res interface{}, err error)
-type ResolverMiddleware func(ctx context.Context, next Resolver) (res interface{}, err error)
+type FieldMiddleware func(ctx context.Context, next Resolver) (res interface{}, err error)
type RequestMiddleware func(ctx context.Context, next func(ctx context.Context) []byte) []byte
type RequestContext struct {
RawQuery string
Variables map[string]interface{}
- Doc *query.Document
+ Doc *ast.QueryDocument
// ErrorPresenter will be used to generate the error
// message from errors given to Error().
- ErrorPresenter ErrorPresenterFunc
- Recover RecoverFunc
- ResolverMiddleware ResolverMiddleware
- RequestMiddleware RequestMiddleware
+ ErrorPresenter ErrorPresenterFunc
+ Recover RecoverFunc
+ ResolverMiddleware FieldMiddleware
+ DirectiveMiddleware FieldMiddleware
+ RequestMiddleware RequestMiddleware
errorsMu sync.Mutex
- Errors []*Error
+ Errors gqlerror.List
}
func DefaultResolverMiddleware(ctx context.Context, next Resolver) (res interface{}, err error) {
return next(ctx)
}
+func DefaultDirectiveMiddleware(ctx context.Context, next Resolver) (res interface{}, err error) {
+ return next(ctx)
+}
+
func DefaultRequestMiddleware(ctx context.Context, next func(ctx context.Context) []byte) []byte {
return next(ctx)
}
-func NewRequestContext(doc *query.Document, query string, variables map[string]interface{}) *RequestContext {
+func NewRequestContext(doc *ast.QueryDocument, query string, variables map[string]interface{}) *RequestContext {
return &RequestContext{
- Doc: doc,
- RawQuery: query,
- Variables: variables,
- ResolverMiddleware: DefaultResolverMiddleware,
- RequestMiddleware: DefaultRequestMiddleware,
- Recover: DefaultRecover,
- ErrorPresenter: DefaultErrorPresenter,
+ Doc: doc,
+ RawQuery: query,
+ Variables: variables,
+ ResolverMiddleware: DefaultResolverMiddleware,
+ DirectiveMiddleware: DefaultDirectiveMiddleware,
+ RequestMiddleware: DefaultRequestMiddleware,
+ Recover: DefaultRecover,
+ ErrorPresenter: DefaultErrorPresenter,
}
}
@@ -68,54 +75,52 @@ func WithRequestContext(ctx context.Context, rc *RequestContext) context.Context
}
type ResolverContext struct {
+ Parent *ResolverContext
// The name of the type this field belongs to
Object string
// These are the args after processing, they can be mutated in middleware to change what the resolver will get.
Args map[string]interface{}
// The raw field
Field CollectedField
- // The path of fields to get to this resolver
- Path []interface{}
-}
-
-func (r *ResolverContext) PushField(alias string) {
- r.Path = append(r.Path, alias)
-}
+ // The index of array in path.
+ Index *int
+ // The result object of resolver
+ Result interface{}
+}
+
+func (r *ResolverContext) Path() []interface{} {
+ var path []interface{}
+ for it := r; it != nil; it = it.Parent {
+ if it.Index != nil {
+ path = append(path, *it.Index)
+ } else if it.Field.Field != nil {
+ path = append(path, it.Field.Alias)
+ }
+ }
-func (r *ResolverContext) PushIndex(index int) {
- r.Path = append(r.Path, index)
-}
+ // because we are walking up the chain, all the elements are backwards, do an inplace flip.
+ for i := len(path)/2 - 1; i >= 0; i-- {
+ opp := len(path) - 1 - i
+ path[i], path[opp] = path[opp], path[i]
+ }
-func (r *ResolverContext) Pop() {
- r.Path = r.Path[0 : len(r.Path)-1]
+ return path
}
func GetResolverContext(ctx context.Context) *ResolverContext {
- val := ctx.Value(resolver)
- if val == nil {
- return nil
- }
-
- return val.(*ResolverContext)
+ val, _ := ctx.Value(resolver).(*ResolverContext)
+ return val
}
func WithResolverContext(ctx context.Context, rc *ResolverContext) context.Context {
- parent := GetResolverContext(ctx)
- rc.Path = nil
- if parent != nil {
- rc.Path = append(rc.Path, parent.Path...)
- }
- if rc.Field.Alias != "" {
- rc.PushField(rc.Field.Alias)
- }
+ rc.Parent = GetResolverContext(ctx)
return context.WithValue(ctx, resolver, rc)
}
// This is just a convenient wrapper method for CollectFields
func CollectFieldsCtx(ctx context.Context, satisfies []string) []CollectedField {
- reqctx := GetRequestContext(ctx)
resctx := GetResolverContext(ctx)
- return CollectFields(reqctx.Doc, resctx.Field.Selections, satisfies, reqctx.Variables)
+ return CollectFields(ctx, resctx.Field.Selections, satisfies)
}
// Errorf sends an error string to the client, passing it through the formatter.
@@ -134,6 +139,34 @@ func (c *RequestContext) Error(ctx context.Context, err error) {
c.Errors = append(c.Errors, c.ErrorPresenter(ctx, err))
}
+// HasError returns true if the current field has already errored
+func (c *RequestContext) HasError(rctx *ResolverContext) bool {
+ c.errorsMu.Lock()
+ defer c.errorsMu.Unlock()
+ path := rctx.Path()
+
+ for _, err := range c.Errors {
+ if equalPath(err.Path, path) {
+ return true
+ }
+ }
+ return false
+}
+
+func equalPath(a []interface{}, b []interface{}) bool {
+ if len(a) != len(b) {
+ return false
+ }
+
+ for i := 0; i < len(a); i++ {
+ if a[i] != b[i] {
+ return false
+ }
+ }
+
+ return true
+}
+
// AddError is a convenience method for adding an error to the current response
func AddError(ctx context.Context, err error) {
GetRequestContext(ctx).Error(ctx, err)
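
ResolverContext now forms a linked list through Parent, and Path() rebuilds the response path by walking up that chain and reversing the slice in place; HasError then compares that path against already-recorded errors. A self-contained sketch of the walk-and-reverse pattern (the aliases are just example field names):

package main

import "fmt"

// frame mimics the Parent chain of graphql.ResolverContext: each level
// carries either a field alias or a list index.
type frame struct {
	parent *frame
	alias  string
	index  *int
}

func (f *frame) path() []interface{} {
	var path []interface{}
	for it := f; it != nil; it = it.parent {
		if it.index != nil {
			path = append(path, *it.index)
		} else if it.alias != "" {
			path = append(path, it.alias)
		}
	}
	// Walking upwards collects the elements backwards, so flip in place.
	for i := len(path)/2 - 1; i >= 0; i-- {
		opp := len(path) - 1 - i
		path[i], path[opp] = path[opp], path[i]
	}
	return path
}

func main() {
	i := 2
	root := &frame{alias: "repository"}
	bugs := &frame{parent: root, alias: "allBugs"}
	elem := &frame{parent: bugs, index: &i}
	leaf := &frame{parent: elem, alias: "title"}
	fmt.Println(leaf.path()) // [repository allBugs 2 title]
}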
diff --git a/vendor/github.com/99designs/gqlgen/graphql/error.go b/vendor/github.com/99designs/gqlgen/graphql/error.go
new file mode 100644
index 00000000..7f161a43
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/graphql/error.go
@@ -0,0 +1,31 @@
+package graphql
+
+import (
+ "context"
+
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+type ErrorPresenterFunc func(context.Context, error) *gqlerror.Error
+
+type ExtendedError interface {
+ Extensions() map[string]interface{}
+}
+
+func DefaultErrorPresenter(ctx context.Context, err error) *gqlerror.Error {
+ if gqlerr, ok := err.(*gqlerror.Error); ok {
+ gqlerr.Path = GetResolverContext(ctx).Path()
+ return gqlerr
+ }
+
+ var extensions map[string]interface{}
+ if ee, ok := err.(ExtendedError); ok {
+ extensions = ee.Extensions()
+ }
+
+ return &gqlerror.Error{
+ Message: err.Error(),
+ Path: GetResolverContext(ctx).Path(),
+ Extensions: extensions,
+ }
+}
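
DefaultErrorPresenter attaches the resolver path to every error and, when the error implements ExtendedError, copies its Extensions into the GraphQL error. A minimal sketch of a resolver error that carries extensions (the error type and the NOT_FOUND code are illustrative):

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/gqlerror"
)

// notFoundError satisfies graphql.ExtendedError, so the presenter would
// include its Extensions() map in the response.
type notFoundError struct {
	prefix string
}

func (e notFoundError) Error() string {
	return fmt.Sprintf("no bug matching prefix %q", e.prefix)
}

func (e notFoundError) Extensions() map[string]interface{} {
	return map[string]interface{}{"code": "NOT_FOUND"}
}

func main() {
	err := notFoundError{prefix: "1234abcd"}
	// Roughly what DefaultErrorPresenter builds, minus the resolver path,
	// which needs a live ResolverContext.
	gqlErr := &gqlerror.Error{Message: err.Error(), Extensions: err.Extensions()}
	fmt.Println(gqlErr.Message, gqlErr.Extensions)
}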
diff --git a/vendor/github.com/99designs/gqlgen/graphql/exec.go b/vendor/github.com/99designs/gqlgen/graphql/exec.go
new file mode 100644
index 00000000..9beb3149
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/graphql/exec.go
@@ -0,0 +1,135 @@
+package graphql
+
+import (
+ "context"
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+type ExecutableSchema interface {
+ Schema() *ast.Schema
+
+ Complexity(typeName, fieldName string, childComplexity int, args map[string]interface{}) (int, bool)
+ Query(ctx context.Context, op *ast.OperationDefinition) *Response
+ Mutation(ctx context.Context, op *ast.OperationDefinition) *Response
+ Subscription(ctx context.Context, op *ast.OperationDefinition) func() *Response
+}
+
+func CollectFields(ctx context.Context, selSet ast.SelectionSet, satisfies []string) []CollectedField {
+ return collectFields(GetRequestContext(ctx), selSet, satisfies, map[string]bool{})
+}
+
+func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []string, visited map[string]bool) []CollectedField {
+ var groupedFields []CollectedField
+
+ for _, sel := range selSet {
+ switch sel := sel.(type) {
+ case *ast.Field:
+ if !shouldIncludeNode(sel.Directives, reqCtx.Variables) {
+ continue
+ }
+ f := getOrCreateField(&groupedFields, sel.Alias, func() CollectedField {
+ return CollectedField{Field: sel}
+ })
+
+ f.Selections = append(f.Selections, sel.SelectionSet...)
+ case *ast.InlineFragment:
+ if !shouldIncludeNode(sel.Directives, reqCtx.Variables) || !instanceOf(sel.TypeCondition, satisfies) {
+ continue
+ }
+ for _, childField := range collectFields(reqCtx, sel.SelectionSet, satisfies, visited) {
+ f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
+ f.Selections = append(f.Selections, childField.Selections...)
+ }
+
+ case *ast.FragmentSpread:
+ if !shouldIncludeNode(sel.Directives, reqCtx.Variables) {
+ continue
+ }
+ fragmentName := sel.Name
+ if _, seen := visited[fragmentName]; seen {
+ continue
+ }
+ visited[fragmentName] = true
+
+ fragment := reqCtx.Doc.Fragments.ForName(fragmentName)
+ if fragment == nil {
+ // should never happen, validator has already run
+ panic(fmt.Errorf("missing fragment %s", fragmentName))
+ }
+
+ if !instanceOf(fragment.TypeCondition, satisfies) {
+ continue
+ }
+
+ for _, childField := range collectFields(reqCtx, fragment.SelectionSet, satisfies, visited) {
+ f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
+ f.Selections = append(f.Selections, childField.Selections...)
+ }
+
+ default:
+ panic(fmt.Errorf("unsupported %T", sel))
+ }
+ }
+
+ return groupedFields
+}
+
+type CollectedField struct {
+ *ast.Field
+
+ Selections ast.SelectionSet
+}
+
+func instanceOf(val string, satisfies []string) bool {
+ for _, s := range satisfies {
+ if val == s {
+ return true
+ }
+ }
+ return false
+}
+
+func getOrCreateField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField {
+ for i, cf := range *c {
+ if cf.Alias == name {
+ return &(*c)[i]
+ }
+ }
+
+ f := creator()
+
+ *c = append(*c, f)
+ return &(*c)[len(*c)-1]
+}
+
+func shouldIncludeNode(directives ast.DirectiveList, variables map[string]interface{}) bool {
+ skip, include := false, true
+
+ if d := directives.ForName("skip"); d != nil {
+ skip = resolveIfArgument(d, variables)
+ }
+
+ if d := directives.ForName("include"); d != nil {
+ include = resolveIfArgument(d, variables)
+ }
+
+ return !skip && include
+}
+
+func resolveIfArgument(d *ast.Directive, variables map[string]interface{}) bool {
+ arg := d.Arguments.ForName("if")
+ if arg == nil {
+ panic(fmt.Sprintf("%s: argument 'if' not defined", d.Name))
+ }
+ value, err := arg.Value.Value(variables)
+ if err != nil {
+ panic(err)
+ }
+ ret, ok := value.(bool)
+ if !ok {
+ panic(fmt.Sprintf("%s: argument 'if' is not a boolean", d.Name))
+ }
+ return ret
+}
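
collectFields honours @skip and @include before grouping fields by alias: a selection is kept only when it is not skipped and is included, with @skip defaulting to false and @include to true. A tiny standalone sketch of that rule, with nil standing for an absent directive:

package main

import "fmt"

// shouldInclude mirrors graphql.shouldIncludeNode for a selection that may
// carry @skip(if: ...) and/or @include(if: ...).
func shouldInclude(skipIf, includeIf *bool) bool {
	skip, include := false, true
	if skipIf != nil {
		skip = *skipIf
	}
	if includeIf != nil {
		include = *includeIf
	}
	return !skip && include
}

func main() {
	t, f := true, false
	fmt.Println(shouldInclude(nil, nil)) // true: no directives
	fmt.Println(shouldInclude(&t, nil))  // false: @skip(if: true)
	fmt.Println(shouldInclude(&f, &f))   // false: @include(if: false)
	fmt.Println(shouldInclude(&f, &t))   // true: skipped by neither
}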
diff --git a/vendor/github.com/vektah/gqlgen/graphql/float.go b/vendor/github.com/99designs/gqlgen/graphql/float.go
index c08b490a..d204335c 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/float.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/float.go
@@ -1,6 +1,7 @@
package graphql
import (
+ "encoding/json"
"fmt"
"io"
"strconv"
@@ -18,8 +19,12 @@ func UnmarshalFloat(v interface{}) (float64, error) {
return strconv.ParseFloat(v, 64)
case int:
return float64(v), nil
+ case int64:
+ return float64(v), nil
case float64:
return v, nil
+ case json.Number:
+ return strconv.ParseFloat(string(v), 64)
default:
return 0, fmt.Errorf("%T is not an float", v)
}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/id.go b/vendor/github.com/99designs/gqlgen/graphql/id.go
index 7958670c..a5a7960f 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/id.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/id.go
@@ -1,6 +1,7 @@
package graphql
import (
+ "encoding/json"
"fmt"
"io"
"strconv"
@@ -15,6 +16,8 @@ func UnmarshalID(v interface{}) (string, error) {
switch v := v.(type) {
case string:
return v, nil
+ case json.Number:
+ return string(v), nil
case int:
return strconv.Itoa(v), nil
case float64:
diff --git a/vendor/github.com/vektah/gqlgen/graphql/int.go b/vendor/github.com/99designs/gqlgen/graphql/int.go
index b63b4c2a..ff87574c 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/int.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/int.go
@@ -1,6 +1,7 @@
package graphql
import (
+ "encoding/json"
"fmt"
"io"
"strconv"
@@ -18,8 +19,10 @@ func UnmarshalInt(v interface{}) (int, error) {
return strconv.Atoi(v)
case int:
return v, nil
- case float64:
+ case int64:
return int(v), nil
+ case json.Number:
+ return strconv.Atoi(string(v))
default:
return 0, fmt.Errorf("%T is not an int", v)
}
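
The scalar unmarshallers now accept int64 and json.Number alongside the old cases, which matters when request variables are decoded with encoding/json's UseNumber() rather than being forced through float64. A small sketch of why that distinction matters for large integer values (64-bit platform assumed):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
)

func main() {
	payload := `{"id": 9007199254740993}` // one above 2^53, not exactly representable as float64

	// Default decoding turns numbers into float64 and silently loses precision.
	var asFloat map[string]interface{}
	_ = json.Unmarshal([]byte(payload), &asFloat)
	fmt.Printf("%.0f\n", asFloat["id"]) // 9007199254740992

	// With UseNumber the value arrives as json.Number, which the updated
	// UnmarshalInt/UnmarshalID convert from the raw string instead.
	dec := json.NewDecoder(strings.NewReader(payload))
	dec.UseNumber()
	var asNumber map[string]interface{}
	_ = dec.Decode(&asNumber)
	n := asNumber["id"].(json.Number)
	i, _ := strconv.Atoi(string(n))
	fmt.Println(i) // 9007199254740993
}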
diff --git a/vendor/github.com/99designs/gqlgen/graphql/introspection/introspection.go b/vendor/github.com/99designs/gqlgen/graphql/introspection/introspection.go
new file mode 100644
index 00000000..baff882e
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/graphql/introspection/introspection.go
@@ -0,0 +1,58 @@
+// introspection implements the spec defined in https://github.com/facebook/graphql/blob/master/spec/Section%204%20--%20Introspection.md#schema-introspection
+package introspection
+
+import "github.com/vektah/gqlparser/ast"
+
+type (
+ Directive struct {
+ Name string
+ Description string
+ Locations []string
+ Args []InputValue
+ }
+
+ EnumValue struct {
+ Name string
+ Description string
+ IsDeprecated bool
+ DeprecationReason string
+ }
+
+ Field struct {
+ Name string
+ Description string
+ Type *Type
+ Args []InputValue
+ IsDeprecated bool
+ DeprecationReason string
+ }
+
+ InputValue struct {
+ Name string
+ Description string
+ DefaultValue *string
+ Type *Type
+ }
+)
+
+func WrapSchema(schema *ast.Schema) *Schema {
+ return &Schema{schema: schema}
+}
+
+func isDeprecated(directives ast.DirectiveList) bool {
+ return directives.ForName("deprecated") != nil
+}
+
+func deprecationReason(directives ast.DirectiveList) string {
+ deprecation := directives.ForName("deprecated")
+ if deprecation == nil {
+ return ""
+ }
+
+ reason := deprecation.Arguments.ForName("reason")
+ if reason == nil {
+ return ""
+ }
+
+ return reason.Value.Raw
+}
diff --git a/vendor/github.com/vektah/gqlgen/neelance/introspection/query.go b/vendor/github.com/99designs/gqlgen/graphql/introspection/query.go
index b1e4fbc6..b1e4fbc6 100644
--- a/vendor/github.com/vektah/gqlgen/neelance/introspection/query.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/introspection/query.go
diff --git a/vendor/github.com/99designs/gqlgen/graphql/introspection/schema.go b/vendor/github.com/99designs/gqlgen/graphql/introspection/schema.go
new file mode 100644
index 00000000..b5d2c482
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/graphql/introspection/schema.go
@@ -0,0 +1,68 @@
+package introspection
+
+import (
+ "strings"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+type Schema struct {
+ schema *ast.Schema
+}
+
+func (s *Schema) Types() []Type {
+ var types []Type
+ for _, typ := range s.schema.Types {
+ if strings.HasPrefix(typ.Name, "__") {
+ continue
+ }
+ types = append(types, *WrapTypeFromDef(s.schema, typ))
+ }
+ return types
+}
+
+func (s *Schema) QueryType() *Type {
+ return WrapTypeFromDef(s.schema, s.schema.Query)
+}
+
+func (s *Schema) MutationType() *Type {
+ return WrapTypeFromDef(s.schema, s.schema.Mutation)
+}
+
+func (s *Schema) SubscriptionType() *Type {
+ return WrapTypeFromDef(s.schema, s.schema.Subscription)
+}
+
+func (s *Schema) Directives() []Directive {
+ var res []Directive
+
+ for _, d := range s.schema.Directives {
+ res = append(res, s.directiveFromDef(d))
+ }
+
+ return res
+}
+
+func (s *Schema) directiveFromDef(d *ast.DirectiveDefinition) Directive {
+ var locs []string
+ for _, loc := range d.Locations {
+ locs = append(locs, string(loc))
+ }
+
+ var args []InputValue
+ for _, arg := range d.Arguments {
+ args = append(args, InputValue{
+ Name: arg.Name,
+ Description: arg.Description,
+ DefaultValue: defaultValue(arg.DefaultValue),
+ Type: WrapTypeFromType(s.schema, arg.Type),
+ })
+ }
+
+ return Directive{
+ Name: d.Name,
+ Description: d.Description,
+ Locations: locs,
+ Args: args,
+ }
+}
diff --git a/vendor/github.com/99designs/gqlgen/graphql/introspection/type.go b/vendor/github.com/99designs/gqlgen/graphql/introspection/type.go
new file mode 100644
index 00000000..dce144e0
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/graphql/introspection/type.go
@@ -0,0 +1,174 @@
+package introspection
+
+import (
+ "strings"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+type Type struct {
+ schema *ast.Schema
+ def *ast.Definition
+ typ *ast.Type
+}
+
+func WrapTypeFromDef(s *ast.Schema, def *ast.Definition) *Type {
+ if def == nil {
+ return nil
+ }
+ return &Type{schema: s, def: def}
+}
+
+func WrapTypeFromType(s *ast.Schema, typ *ast.Type) *Type {
+ if typ == nil {
+ return nil
+ }
+
+ if !typ.NonNull && typ.NamedType != "" {
+ return &Type{schema: s, def: s.Types[typ.NamedType]}
+ }
+ return &Type{schema: s, typ: typ}
+}
+
+func (t *Type) Kind() string {
+ if t.typ != nil {
+ if t.typ.NonNull {
+ return "NON_NULL"
+ }
+
+ if t.typ.Elem != nil {
+ return "LIST"
+ }
+ } else {
+ return string(t.def.Kind)
+ }
+
+ panic("UNKNOWN")
+}
+
+func (t *Type) Name() *string {
+ if t.def == nil {
+ return nil
+ }
+ return &t.def.Name
+}
+
+func (t *Type) Description() string {
+ if t.def == nil {
+ return ""
+ }
+ return t.def.Description
+}
+
+func (t *Type) Fields(includeDeprecated bool) []Field {
+ if t.def == nil || (t.def.Kind != ast.Object && t.def.Kind != ast.Interface) {
+ return nil
+ }
+ var fields []Field
+ for _, f := range t.def.Fields {
+ if strings.HasPrefix(f.Name, "__") {
+ continue
+ }
+
+ var args []InputValue
+ for _, arg := range f.Arguments {
+ args = append(args, InputValue{
+ Type: WrapTypeFromType(t.schema, arg.Type),
+ Name: arg.Name,
+ Description: arg.Description,
+ DefaultValue: defaultValue(arg.DefaultValue),
+ })
+ }
+
+ fields = append(fields, Field{
+ Name: f.Name,
+ Description: f.Description,
+ Args: args,
+ Type: WrapTypeFromType(t.schema, f.Type),
+ IsDeprecated: isDeprecated(f.Directives),
+ DeprecationReason: deprecationReason(f.Directives),
+ })
+ }
+ return fields
+}
+
+func (t *Type) InputFields() []InputValue {
+ if t.def == nil || t.def.Kind != ast.InputObject {
+ return nil
+ }
+
+ var res []InputValue
+ for _, f := range t.def.Fields {
+ res = append(res, InputValue{
+ Name: f.Name,
+ Description: f.Description,
+ Type: WrapTypeFromType(t.schema, f.Type),
+ DefaultValue: defaultValue(f.DefaultValue),
+ })
+ }
+ return res
+}
+
+func defaultValue(value *ast.Value) *string {
+ if value == nil {
+ return nil
+ }
+ val := value.String()
+ return &val
+}
+
+func (t *Type) Interfaces() []Type {
+ if t.def == nil || t.def.Kind != ast.Object {
+ return nil
+ }
+
+ var res []Type
+ for _, intf := range t.def.Interfaces {
+ res = append(res, *WrapTypeFromDef(t.schema, t.schema.Types[intf]))
+ }
+
+ return res
+}
+
+func (t *Type) PossibleTypes() []Type {
+ if t.def == nil || (t.def.Kind != ast.Interface && t.def.Kind != ast.Union) {
+ return nil
+ }
+
+ var res []Type
+ for _, pt := range t.schema.GetPossibleTypes(t.def) {
+ res = append(res, *WrapTypeFromDef(t.schema, pt))
+ }
+ return res
+}
+
+func (t *Type) EnumValues(includeDeprecated bool) []EnumValue {
+ if t.def == nil || t.def.Kind != ast.Enum {
+ return nil
+ }
+
+ var res []EnumValue
+ for _, val := range t.def.EnumValues {
+ res = append(res, EnumValue{
+ Name: val.Name,
+ Description: val.Description,
+ IsDeprecated: isDeprecated(val.Directives),
+ DeprecationReason: deprecationReason(val.Directives),
+ })
+ }
+ return res
+}
+
+func (t *Type) OfType() *Type {
+ if t.typ == nil {
+ return nil
+ }
+ if t.typ.NonNull {
+ // gqlparser has no separate non-null wrapper node, so synthesize the inner type by clearing NonNull
+ cpy := *t.typ
+ cpy.NonNull = false
+
+ return WrapTypeFromType(t.schema, &cpy)
+ }
+ return WrapTypeFromType(t.schema, t.typ.Elem)
+}
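
To make the Kind/OfType recursion above concrete, here is a rough sketch of the wrapper chain produced for a `[String!]!` field; the throwaway schema and the field name are illustrative:

```go
package main

import (
	"fmt"

	"github.com/99designs/gqlgen/graphql/introspection"
	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
)

func main() {
	schema := gqlparser.MustLoadSchema(&ast.Source{Input: `
		schema { query: Query }
		type Query { v: [String!]! }
	`})

	// Grab the ast.Type of Query.v directly from the parsed definition.
	var fieldType *ast.Type
	for _, f := range schema.Query.Fields {
		if f.Name == "v" {
			fieldType = f.Type
		}
	}

	t := introspection.WrapTypeFromType(schema, fieldType)
	fmt.Println(t.Kind())                   // NON_NULL
	fmt.Println(t.OfType().Kind())          // LIST
	fmt.Println(t.OfType().OfType().Kind()) // NON_NULL

	named := t.OfType().OfType().OfType()    // the underlying named type
	fmt.Println(named.Kind(), *named.Name()) // SCALAR String
}
```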
diff --git a/vendor/github.com/vektah/gqlgen/graphql/jsonw.go b/vendor/github.com/99designs/gqlgen/graphql/jsonw.go
index ef9e69c7..c112444a 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/jsonw.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/jsonw.go
@@ -15,9 +15,9 @@ var closeBracket = []byte(`]`)
var colon = []byte(`:`)
var comma = []byte(`,`)
-var Null = lit(nullLit)
-var True = lit(trueLit)
-var False = lit(falseLit)
+var Null = &lit{nullLit}
+var True = &lit{trueLit}
+var False = &lit{falseLit}
type Marshaler interface {
MarshalGQL(w io.Writer)
@@ -76,8 +76,8 @@ func (a Array) MarshalGQL(writer io.Writer) {
writer.Write(closeBracket)
}
-func lit(b []byte) Marshaler {
- return WriterFunc(func(w io.Writer) {
- w.Write(b)
- })
+type lit struct{ b []byte }
+
+func (l lit) MarshalGQL(w io.Writer) {
+ w.Write(l.b)
}
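
After this change Null, True, and False are pointers to a small lit struct rather than closures, and they still satisfy Marshaler. A tiny illustrative sketch (the buffer name is arbitrary):

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/99designs/gqlgen/graphql"
)

func main() {
	var buf bytes.Buffer
	graphql.Null.MarshalGQL(&buf)    // lit.MarshalGQL writes the raw literal bytes
	fmt.Printf("%q\n", buf.String()) // "null"
}
```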
diff --git a/vendor/github.com/vektah/gqlgen/graphql/map.go b/vendor/github.com/99designs/gqlgen/graphql/map.go
index 1e91d1d9..1e91d1d9 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/map.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/map.go
diff --git a/vendor/github.com/vektah/gqlgen/graphql/oneshot.go b/vendor/github.com/99designs/gqlgen/graphql/oneshot.go
index dd31f5ba..dd31f5ba 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/oneshot.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/oneshot.go
diff --git a/vendor/github.com/vektah/gqlgen/graphql/recovery.go b/vendor/github.com/99designs/gqlgen/graphql/recovery.go
index 3aa032dc..3aa032dc 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/recovery.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/recovery.go
diff --git a/vendor/github.com/vektah/gqlgen/graphql/response.go b/vendor/github.com/99designs/gqlgen/graphql/response.go
index c0dc1c23..18664dca 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/response.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/response.go
@@ -4,15 +4,17 @@ import (
"context"
"encoding/json"
"fmt"
+
+ "github.com/vektah/gqlparser/gqlerror"
)
type Response struct {
Data json.RawMessage `json:"data"`
- Errors []*Error `json:"errors,omitempty"`
+ Errors gqlerror.List `json:"errors,omitempty"`
}
func ErrorResponse(ctx context.Context, messagef string, args ...interface{}) *Response {
return &Response{
- Errors: []*Error{{Message: fmt.Sprintf(messagef, args...)}},
+ Errors: gqlerror.List{{Message: fmt.Sprintf(messagef, args...)}},
}
}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/string.go b/vendor/github.com/99designs/gqlgen/graphql/string.go
index d5fb3294..d5fb3294 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/string.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/string.go
diff --git a/vendor/github.com/vektah/gqlgen/graphql/time.go b/vendor/github.com/99designs/gqlgen/graphql/time.go
index 4f448560..4f448560 100644
--- a/vendor/github.com/vektah/gqlgen/graphql/time.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/time.go
diff --git a/vendor/github.com/99designs/gqlgen/graphql/version.go b/vendor/github.com/99designs/gqlgen/graphql/version.go
new file mode 100644
index 00000000..38d3720b
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/graphql/version.go
@@ -0,0 +1,3 @@
+package graphql
+
+const Version = "v0.5.1"
diff --git a/vendor/github.com/vektah/gqlgen/handler/graphql.go b/vendor/github.com/99designs/gqlgen/handler/graphql.go
index 4a5c61f5..9d222826 100644
--- a/vendor/github.com/vektah/gqlgen/handler/graphql.go
+++ b/vendor/github.com/99designs/gqlgen/handler/graphql.go
@@ -4,14 +4,18 @@ import (
"context"
"encoding/json"
"fmt"
+ "io"
"net/http"
"strings"
+ "github.com/99designs/gqlgen/complexity"
+ "github.com/99designs/gqlgen/graphql"
"github.com/gorilla/websocket"
- "github.com/vektah/gqlgen/graphql"
- "github.com/vektah/gqlgen/neelance/errors"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/validation"
+ "github.com/hashicorp/golang-lru"
+ "github.com/vektah/gqlparser"
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/validator"
)
type params struct {
@@ -21,14 +25,16 @@ type params struct {
}
type Config struct {
- upgrader websocket.Upgrader
- recover graphql.RecoverFunc
- errorPresenter graphql.ErrorPresenterFunc
- resolverHook graphql.ResolverMiddleware
- requestHook graphql.RequestMiddleware
+ cacheSize int
+ upgrader websocket.Upgrader
+ recover graphql.RecoverFunc
+ errorPresenter graphql.ErrorPresenterFunc
+ resolverHook graphql.FieldMiddleware
+ requestHook graphql.RequestMiddleware
+ complexityLimit int
}
-func (c *Config) newRequestContext(doc *query.Document, query string, variables map[string]interface{}) *graphql.RequestContext {
+func (c *Config) newRequestContext(doc *ast.QueryDocument, query string, variables map[string]interface{}) *graphql.RequestContext {
reqCtx := graphql.NewRequestContext(doc, query, variables)
if hook := c.recover; hook != nil {
reqCtx.Recover = hook
@@ -72,11 +78,17 @@ func ErrorPresenter(f graphql.ErrorPresenterFunc) Option {
}
}
+// ComplexityLimit sets a maximum query complexity that is allowed to be executed.
+// If a query is submitted that exceeds the limit, a 422 status code will be returned.
+func ComplexityLimit(limit int) Option {
+ return func(cfg *Config) {
+ cfg.complexityLimit = limit
+ }
+}
+
// ResolverMiddleware allows you to define a function that will be called around every resolver,
// useful for tracing and logging.
-// It will only be called for user defined resolvers, any direct binding to models is assumed
-// to cost nothing.
-func ResolverMiddleware(middleware graphql.ResolverMiddleware) Option {
+func ResolverMiddleware(middleware graphql.FieldMiddleware) Option {
return func(cfg *Config) {
if cfg.resolverHook == nil {
cfg.resolverHook = middleware
@@ -110,8 +122,19 @@ func RequestMiddleware(middleware graphql.RequestMiddleware) Option {
}
}
+// CacheSize sets the maximum size of the query cache.
+// If size is less than or equal to 0, the cache is disabled.
+func CacheSize(size int) Option {
+ return func(cfg *Config) {
+ cfg.cacheSize = size
+ }
+}
+
+const DefaultCacheSize = 1000
+
func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc {
cfg := Config{
+ cacheSize: DefaultCacheSize,
upgrader: websocket.Upgrader{
ReadBufferSize: 1024,
WriteBufferSize: 1024,
@@ -122,6 +145,17 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
option(&cfg)
}
+ var cache *lru.Cache
+ if cfg.cacheSize > 0 {
+ var err error
+ cache, err = lru.New(cfg.cacheSize)
+ if err != nil {
+ // An error is only returned for non-positive cache size
+ // and we already checked for that.
+ panic("unexpected error creating cache: " + err.Error())
+ }
+ }
+
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodOptions {
w.Header().Set("Allow", "OPTIONS, GET, POST")
@@ -141,13 +175,13 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
reqParams.OperationName = r.URL.Query().Get("operationName")
if variables := r.URL.Query().Get("variables"); variables != "" {
- if err := json.Unmarshal([]byte(variables), &reqParams.Variables); err != nil {
+ if err := jsonDecode(strings.NewReader(variables), &reqParams.Variables); err != nil {
sendErrorf(w, http.StatusBadRequest, "variables could not be decoded")
return
}
}
case http.MethodPost:
- if err := json.NewDecoder(r.Body).Decode(&reqParams); err != nil {
+ if err := jsonDecode(r.Body, &reqParams); err != nil {
sendErrorf(w, http.StatusBadRequest, "json body could not be decoded: "+err.Error())
return
}
@@ -157,25 +191,42 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
}
w.Header().Set("Content-Type", "application/json")
- doc, qErr := query.Parse(reqParams.Query)
- if qErr != nil {
- sendError(w, http.StatusUnprocessableEntity, qErr)
+ var doc *ast.QueryDocument
+ if cache != nil {
+ val, ok := cache.Get(reqParams.Query)
+ if ok {
+ doc = val.(*ast.QueryDocument)
+ }
+ }
+ if doc == nil {
+ var qErr gqlerror.List
+ doc, qErr = gqlparser.LoadQuery(exec.Schema(), reqParams.Query)
+ if len(qErr) > 0 {
+ sendError(w, http.StatusUnprocessableEntity, qErr...)
+ return
+ }
+ if cache != nil {
+ cache.Add(reqParams.Query, doc)
+ }
+ }
+
+ op := doc.Operations.ForName(reqParams.OperationName)
+ if op == nil {
+ sendErrorf(w, http.StatusUnprocessableEntity, "operation %s not found", reqParams.OperationName)
return
}
- errs := validation.Validate(exec.Schema(), doc)
- if len(errs) != 0 {
- sendError(w, http.StatusUnprocessableEntity, errs...)
+ if op.Operation != ast.Query && r.Method == http.MethodGet {
+ sendErrorf(w, http.StatusUnprocessableEntity, "GET requests only allow query operations")
return
}
- op, err := doc.GetOperation(reqParams.OperationName)
+ vars, err := validator.VariableValues(exec.Schema(), op, reqParams.Variables)
if err != nil {
- sendErrorf(w, http.StatusUnprocessableEntity, err.Error())
+ sendError(w, http.StatusUnprocessableEntity, err)
return
}
-
- reqCtx := cfg.newRequestContext(doc, reqParams.Query, reqParams.Variables)
+ reqCtx := cfg.newRequestContext(doc, reqParams.Query, vars)
ctx := graphql.WithRequestContext(r.Context(), reqCtx)
defer func() {
@@ -185,14 +236,22 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
}
}()
- switch op.Type {
- case query.Query:
+ if cfg.complexityLimit > 0 {
+ queryComplexity := complexity.Calculate(exec, op, vars)
+ if queryComplexity > cfg.complexityLimit {
+ sendErrorf(w, http.StatusUnprocessableEntity, "query has complexity %d, which exceeds the limit of %d", queryComplexity, cfg.complexityLimit)
+ return
+ }
+ }
+
+ switch op.Operation {
+ case ast.Query:
b, err := json.Marshal(exec.Query(ctx, op))
if err != nil {
panic(err)
}
w.Write(b)
- case query.Mutation:
+ case ast.Mutation:
b, err := json.Marshal(exec.Mutation(ctx, op))
if err != nil {
panic(err)
@@ -204,26 +263,15 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
})
}
-func sendError(w http.ResponseWriter, code int, errors ...*errors.QueryError) {
- w.WriteHeader(code)
- var errs []*graphql.Error
- for _, err := range errors {
- var locations []graphql.ErrorLocation
- for _, l := range err.Locations {
- fmt.Println(graphql.ErrorLocation(l))
- locations = append(locations, graphql.ErrorLocation{
- Line: l.Line,
- Column: l.Column,
- })
- }
+func jsonDecode(r io.Reader, val interface{}) error {
+ dec := json.NewDecoder(r)
+ dec.UseNumber()
+ return dec.Decode(val)
+}
- errs = append(errs, &graphql.Error{
- Message: err.Message,
- Path: err.Path,
- Locations: locations,
- })
- }
- b, err := json.Marshal(&graphql.Response{Errors: errs})
+func sendError(w http.ResponseWriter, code int, errors ...*gqlerror.Error) {
+ w.WriteHeader(code)
+ b, err := json.Marshal(&graphql.Response{Errors: errors})
if err != nil {
panic(err)
}
@@ -231,5 +279,5 @@ func sendError(w http.ResponseWriter, code int, errors ...*errors.QueryError) {
}
func sendErrorf(w http.ResponseWriter, code int, format string, args ...interface{}) {
- sendError(w, code, &errors.QueryError{Message: fmt.Sprintf(format, args...)})
+ sendError(w, code, &gqlerror.Error{Message: fmt.Sprintf(format, args...)})
}
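
A hedged sketch of how the new CacheSize and ComplexityLimit options could be wired into a server; the ExecutableSchema would come from gqlgen's generated code, whose constructor is not shown here, and all other names are illustrative:

```go
package main

import (
	"net/http"

	"github.com/99designs/gqlgen/graphql"
	"github.com/99designs/gqlgen/handler"
)

// newMux wires the playground and the query endpoint; es is the
// gqlgen-generated ExecutableSchema.
func newMux(es graphql.ExecutableSchema) *http.ServeMux {
	mux := http.NewServeMux()
	mux.Handle("/", handler.Playground("GraphQL playground", "/query"))
	mux.Handle("/query", handler.GraphQL(es,
		handler.CacheSize(500),       // bound the LRU query cache (default is 1000)
		handler.ComplexityLimit(200), // queries above this complexity get a 422
	))
	return mux
}
```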
diff --git a/vendor/github.com/vektah/gqlgen/handler/playground.go b/vendor/github.com/99designs/gqlgen/handler/playground.go
index 44533590..d0ada8ca 100644
--- a/vendor/github.com/vektah/gqlgen/handler/playground.go
+++ b/vendor/github.com/99designs/gqlgen/handler/playground.go
@@ -30,6 +30,9 @@ var page = template.Must(template.New("graphiql").Parse(`<!DOCTYPE html>
GraphQLPlayground.init(root, {
endpoint: location.protocol + '//' + location.host + '{{.endpoint}}',
subscriptionsEndpoint: wsProto + '//' + location.host + '{{.endpoint }}',
+ settings: {
+ 'request.credentials': 'same-origin'
+ }
})
})
</script>
@@ -42,7 +45,7 @@ func Playground(title string, endpoint string) http.HandlerFunc {
err := page.Execute(w, map[string]string{
"title": title,
"endpoint": endpoint,
- "version": "1.4.3",
+ "version": "1.6.2",
})
if err != nil {
panic(err)
diff --git a/vendor/github.com/99designs/gqlgen/handler/stub.go b/vendor/github.com/99designs/gqlgen/handler/stub.go
new file mode 100644
index 00000000..d237e188
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/handler/stub.go
@@ -0,0 +1,51 @@
+package handler
+
+import (
+ "context"
+
+ "github.com/99designs/gqlgen/graphql"
+ "github.com/vektah/gqlparser"
+ "github.com/vektah/gqlparser/ast"
+)
+
+type executableSchemaStub struct {
+ NextResp chan struct{}
+}
+
+var _ graphql.ExecutableSchema = &executableSchemaStub{}
+
+func (e *executableSchemaStub) Schema() *ast.Schema {
+ return gqlparser.MustLoadSchema(&ast.Source{Input: `
+ schema { query: Query }
+ type Query {
+ me: User!
+ user(id: Int): User!
+ }
+ type User { name: String! }
+ `})
+}
+
+func (e *executableSchemaStub) Complexity(typeName, field string, childComplexity int, args map[string]interface{}) (int, bool) {
+ return 0, false
+}
+
+func (e *executableSchemaStub) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
+ return &graphql.Response{Data: []byte(`{"name":"test"}`)}
+}
+
+func (e *executableSchemaStub) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
+ return graphql.ErrorResponse(ctx, "mutations are not supported")
+}
+
+func (e *executableSchemaStub) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {
+ return func() *graphql.Response {
+ select {
+ case <-ctx.Done():
+ return nil
+ case <-e.NextResp:
+ return &graphql.Response{
+ Data: []byte(`{"name":"test"}`),
+ }
+ }
+ }
+}
diff --git a/vendor/github.com/vektah/gqlgen/handler/websocket.go b/vendor/github.com/99designs/gqlgen/handler/websocket.go
index e80748ca..2be1e87f 100644
--- a/vendor/github.com/vektah/gqlgen/handler/websocket.go
+++ b/vendor/github.com/99designs/gqlgen/handler/websocket.go
@@ -1,6 +1,7 @@
package handler
import (
+ "bytes"
"context"
"encoding/json"
"fmt"
@@ -8,11 +9,12 @@ import (
"net/http"
"sync"
+ "github.com/99designs/gqlgen/graphql"
"github.com/gorilla/websocket"
- "github.com/vektah/gqlgen/graphql"
- "github.com/vektah/gqlgen/neelance/errors"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/validation"
+ "github.com/vektah/gqlparser"
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/validator"
)
const (
@@ -113,7 +115,7 @@ func (c *wsConnection) run() {
closer := c.active[message.ID]
c.mu.Unlock()
if closer == nil {
- c.sendError(message.ID, errors.Errorf("%s is not running, cannot stop", message.ID))
+ c.sendError(message.ID, gqlerror.Errorf("%s is not running, cannot stop", message.ID))
continue
}
@@ -131,35 +133,34 @@ func (c *wsConnection) run() {
func (c *wsConnection) subscribe(message *operationMessage) bool {
var reqParams params
- if err := json.Unmarshal(message.Payload, &reqParams); err != nil {
+ if err := jsonDecode(bytes.NewReader(message.Payload), &reqParams); err != nil {
c.sendConnectionError("invalid json")
return false
}
- doc, qErr := query.Parse(reqParams.Query)
+ doc, qErr := gqlparser.LoadQuery(c.exec.Schema(), reqParams.Query)
if qErr != nil {
- c.sendError(message.ID, qErr)
+ c.sendError(message.ID, qErr...)
return true
}
- errs := validation.Validate(c.exec.Schema(), doc)
- if len(errs) != 0 {
- c.sendError(message.ID, errs...)
+ op := doc.Operations.ForName(reqParams.OperationName)
+ if op == nil {
+ c.sendError(message.ID, gqlerror.Errorf("operation %s not found", reqParams.OperationName))
return true
}
- op, err := doc.GetOperation(reqParams.OperationName)
+ vars, err := validator.VariableValues(c.exec.Schema(), op, reqParams.Variables)
if err != nil {
- c.sendError(message.ID, errors.Errorf("%s", err.Error()))
+ c.sendError(message.ID, err)
return true
}
-
- reqCtx := c.cfg.newRequestContext(doc, reqParams.Query, reqParams.Variables)
+ reqCtx := c.cfg.newRequestContext(doc, reqParams.Query, vars)
ctx := graphql.WithRequestContext(c.ctx, reqCtx)
- if op.Type != query.Subscription {
+ if op.Operation != ast.Subscription {
var result *graphql.Response
- if op.Type == query.Query {
+ if op.Operation == ast.Query {
result = c.exec.Query(ctx, op)
} else {
result = c.exec.Mutation(ctx, op)
@@ -178,7 +179,7 @@ func (c *wsConnection) subscribe(message *operationMessage) bool {
defer func() {
if r := recover(); r != nil {
userErr := reqCtx.Recover(ctx, r)
- c.sendError(message.ID, &errors.QueryError{Message: userErr.Error()})
+ c.sendError(message.ID, &gqlerror.Error{Message: userErr.Error()})
}
}()
next := c.exec.Subscription(ctx, op)
@@ -200,14 +201,14 @@ func (c *wsConnection) subscribe(message *operationMessage) bool {
func (c *wsConnection) sendData(id string, response *graphql.Response) {
b, err := json.Marshal(response)
if err != nil {
- c.sendError(id, errors.Errorf("unable to encode json response: %s", err.Error()))
+ c.sendError(id, gqlerror.Errorf("unable to encode json response: %s", err.Error()))
return
}
c.write(&operationMessage{Type: dataMsg, ID: id, Payload: b})
}
-func (c *wsConnection) sendError(id string, errors ...*errors.QueryError) {
+func (c *wsConnection) sendError(id string, errors ...*gqlerror.Error) {
var errs []error
for _, err := range errors {
errs = append(errs, err)
@@ -220,7 +221,7 @@ func (c *wsConnection) sendError(id string, errors ...*errors.QueryError) {
}
func (c *wsConnection) sendConnectionError(format string, args ...interface{}) {
- b, err := json.Marshal(&graphql.Error{Message: fmt.Sprintf(format, args...)})
+ b, err := json.Marshal(&gqlerror.Error{Message: fmt.Sprintf(format, args...)})
if err != nil {
panic(err)
}
@@ -229,11 +230,17 @@ func (c *wsConnection) sendConnectionError(format string, args ...interface{}) {
}
func (c *wsConnection) readOp() *operationMessage {
+ _, r, err := c.conn.NextReader()
+ if err != nil {
+ c.sendConnectionError("invalid json")
+ return nil
+ }
message := operationMessage{}
- if err := c.conn.ReadJSON(&message); err != nil {
+ if err := jsonDecode(r, &message); err != nil {
c.sendConnectionError("invalid json")
return nil
}
+
return &message
}
diff --git a/vendor/github.com/99designs/gqlgen/internal/gopath/gopath.go b/vendor/github.com/99designs/gqlgen/internal/gopath/gopath.go
new file mode 100644
index 00000000..c9b66167
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/internal/gopath/gopath.go
@@ -0,0 +1,37 @@
+package gopath
+
+import (
+ "fmt"
+ "go/build"
+ "path/filepath"
+ "strings"
+)
+
+var NotFound = fmt.Errorf("not on GOPATH")
+
+// Contains returns true if the given directory is in the GOPATH
+func Contains(dir string) bool {
+ _, err := Dir2Import(dir)
+ return err == nil
+}
+
+// Dir2Import takes an *absolute* path and returns the Go import path for the package, or an error if it isn't on the GOPATH.
+func Dir2Import(dir string) (string, error) {
+ dir = filepath.ToSlash(dir)
+ for _, gopath := range filepath.SplitList(build.Default.GOPATH) {
+ gopath = filepath.ToSlash(filepath.Join(gopath, "src"))
+ if len(gopath) < len(dir) && strings.EqualFold(gopath, dir[0:len(gopath)]) {
+ return dir[len(gopath)+1:], nil
+ }
+ }
+ return "", NotFound
+}
+
+// MustDir2Import takes an *absolute* path and returns the Go import path for the package, panicking if it isn't on the GOPATH.
+func MustDir2Import(dir string) string {
+ pkg, err := Dir2Import(dir)
+ if err != nil {
+ panic(err)
+ }
+ return pkg
+}
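
Dir2Import lives in an internal package, so only gqlgen itself can import it; purely as an illustration of its behaviour (the paths are hypothetical):

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/99designs/gqlgen/internal/gopath"
)

func main() {
	dir, err := filepath.Abs("./graph") // Dir2Import expects an absolute path
	if err != nil {
		panic(err)
	}
	pkg, err := gopath.Dir2Import(dir)
	if err == gopath.NotFound {
		fmt.Println("not under any $GOPATH/src")
		return
	}
	fmt.Println("import path:", pkg) // e.g. github.com/you/yourapp/graph
}
```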
diff --git a/vendor/github.com/agnivade/levenshtein/.gitignore b/vendor/github.com/agnivade/levenshtein/.gitignore
new file mode 100644
index 00000000..345780a4
--- /dev/null
+++ b/vendor/github.com/agnivade/levenshtein/.gitignore
@@ -0,0 +1,5 @@
+coverage.txt
+fuzz/fuzz-fuzz.zip
+fuzz/corpus/corpus/*
+fuzz/corpus/suppressions/*
+fuzz/corpus/crashes/*
diff --git a/vendor/github.com/agnivade/levenshtein/.travis.yml b/vendor/github.com/agnivade/levenshtein/.travis.yml
new file mode 100644
index 00000000..06b3ba55
--- /dev/null
+++ b/vendor/github.com/agnivade/levenshtein/.travis.yml
@@ -0,0 +1,7 @@
+language: go
+
+go:
+- 1.8.x
+- 1.9.x
+- 1.10.x
+- tip
diff --git a/vendor/github.com/agnivade/levenshtein/License.txt b/vendor/github.com/agnivade/levenshtein/License.txt
new file mode 100644
index 00000000..54b51f49
--- /dev/null
+++ b/vendor/github.com/agnivade/levenshtein/License.txt
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Agniva De Sarker
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/agnivade/levenshtein/Makefile b/vendor/github.com/agnivade/levenshtein/Makefile
new file mode 100644
index 00000000..4bef27dd
--- /dev/null
+++ b/vendor/github.com/agnivade/levenshtein/Makefile
@@ -0,0 +1,13 @@
+all: test install
+
+install:
+ go install
+
+lint:
+ gofmt -l -s -w . && go tool vet -all . && golint
+
+test:
+ go test -race -v -coverprofile=coverage.txt -covermode=atomic
+
+bench:
+ go test -run=XXX -bench=. -benchmem
diff --git a/vendor/github.com/agnivade/levenshtein/README.md b/vendor/github.com/agnivade/levenshtein/README.md
new file mode 100644
index 00000000..b0fd81df
--- /dev/null
+++ b/vendor/github.com/agnivade/levenshtein/README.md
@@ -0,0 +1,57 @@
+levenshtein [![Build Status](https://travis-ci.org/agnivade/levenshtein.svg?branch=master)](https://travis-ci.org/agnivade/levenshtein) [![Go Report Card](https://goreportcard.com/badge/github.com/agnivade/levenshtein)](https://goreportcard.com/report/github.com/agnivade/levenshtein) [![GoDoc](https://godoc.org/github.com/agnivade/levenshtein?status.svg)](https://godoc.org/github.com/agnivade/levenshtein)
+===========
+
+[Go](http://golang.org) package to calculate the [Levenshtein Distance](http://en.wikipedia.org/wiki/Levenshtein_distance)
+
+The library is fully capable of working with non-ASCII strings. However, the strings are not normalized; that is left as a user-dependent choice. Please normalize the strings before passing them to the library if you have such a requirement.
+- https://blog.golang.org/normalization
+
+Install
+-------
+
+ go get github.com/agnivade/levenshtein
+
+Example
+-------
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/agnivade/levenshtein"
+)
+
+func main() {
+ s1 := "kitten"
+ s2 := "sitting"
+ distance := levenshtein.ComputeDistance(s1, s2)
+ fmt.Printf("The distance between %s and %s is %d.\n", s1, s2, distance)
+ // Output:
+ // The distance between kitten and sitting is 3.
+}
+
+```
+
+Benchmarks
+----------
+
+```
+name time/op
+Simple/ASCII-4 537ns ± 2%
+Simple/French-4 956ns ± 0%
+Simple/Nordic-4 1.95µs ± 1%
+Simple/Tibetan-4 1.53µs ± 2%
+
+name alloc/op
+Simple/ASCII-4 96.0B ± 0%
+Simple/French-4 128B ± 0%
+Simple/Nordic-4 192B ± 0%
+Simple/Tibetan-4 144B ± 0%
+
+name allocs/op
+Simple/ASCII-4 1.00 ± 0%
+Simple/French-4 1.00 ± 0%
+Simple/Nordic-4 1.00 ± 0%
+Simple/Tibetan-4 1.00 ± 0%
+```
diff --git a/vendor/github.com/agnivade/levenshtein/go.mod b/vendor/github.com/agnivade/levenshtein/go.mod
new file mode 100644
index 00000000..b2921fb3
--- /dev/null
+++ b/vendor/github.com/agnivade/levenshtein/go.mod
@@ -0,0 +1 @@
+module github.com/agnivade/levenshtein
diff --git a/vendor/github.com/agnivade/levenshtein/levenshtein.go b/vendor/github.com/agnivade/levenshtein/levenshtein.go
new file mode 100644
index 00000000..e215813f
--- /dev/null
+++ b/vendor/github.com/agnivade/levenshtein/levenshtein.go
@@ -0,0 +1,71 @@
+// Package levenshtein is a Go implementation to calculate Levenshtein Distance.
+//
+// Implementation taken from
+// https://gist.github.com/andrei-m/982927#gistcomment-1931258
+package levenshtein
+
+// ComputeDistance computes the Levenshtein distance between the two
+// strings passed as arguments. The return value is the Levenshtein distance.
+//
+// It works on runes (Unicode code points) but does not normalize
+// the input strings. See https://blog.golang.org/normalization
+// and the golang.org/x/text/unicode/norm package.
+func ComputeDistance(a, b string) int {
+ if a == b {
+ return 0
+ }
+
+ // We need to convert to []rune if the strings are non-ASCII.
+ // This could be avoided by using utf8.RuneCountInString
+ // and then doing some juggling with rune indices.
+ // The primary challenge is keeping track of the previous rune.
+ // With a range loop, it's not that easy; and with a for loop
+ // we would need to track the inter-rune width using utf8.DecodeRuneInString.
+ s1 := []rune(a)
+ s2 := []rune(b)
+
+ // swap to save some memory O(min(a,b)) instead of O(a)
+ if len(s1) > len(s2) {
+ s1, s2 = s2, s1
+ }
+ lenS1 := len(s1)
+ lenS2 := len(s2)
+
+ // init the row
+ x := make([]int, lenS1+1)
+ for i := 0; i <= lenS1; i++ {
+ x[i] = i
+ }
+
+ // fill in the rest
+ for i := 1; i <= lenS2; i++ {
+ prev := i
+ var current int
+
+ for j := 1; j <= lenS1; j++ {
+
+ if s2[i-1] == s1[j-1] {
+ current = x[j-1] // match
+ } else {
+ current = min(x[j-1]+1, prev+1, x[j]+1)
+ }
+ x[j-1] = prev
+ prev = current
+ }
+ x[lenS1] = prev
+ }
+ return x[lenS1]
+}
+
+func min(a, b, c int) int {
+ if a < b {
+ if a < c {
+ return a
+ }
+ } else {
+ if b < c {
+ return b
+ }
+ }
+ return c
+}
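
Since ComputeDistance converts its inputs to []rune first, multi-byte characters count as single edits. A small sketch of the expected results:

```go
package main

import (
	"fmt"

	"github.com/agnivade/levenshtein"
)

func main() {
	// Two single-rune substitutions (é -> e twice), despite the extra bytes.
	fmt.Println(levenshtein.ComputeDistance("résumé", "resume")) // 2

	// The classic textbook pair.
	fmt.Println(levenshtein.ComputeDistance("kitten", "sitting")) // 3
}
```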
diff --git a/vendor/github.com/hashicorp/golang-lru/.gitignore b/vendor/github.com/hashicorp/golang-lru/.gitignore
new file mode 100644
index 00000000..83656241
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/.gitignore
@@ -0,0 +1,23 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
diff --git a/vendor/github.com/hashicorp/golang-lru/2q.go b/vendor/github.com/hashicorp/golang-lru/2q.go
new file mode 100644
index 00000000..e474cd07
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/2q.go
@@ -0,0 +1,223 @@
+package lru
+
+import (
+ "fmt"
+ "sync"
+
+ "github.com/hashicorp/golang-lru/simplelru"
+)
+
+const (
+ // Default2QRecentRatio is the ratio of the 2Q cache dedicated
+ // to recently added entries that have only been accessed once.
+ Default2QRecentRatio = 0.25
+
+ // Default2QGhostEntries is the default ratio of ghost
+ // entries kept to track entries recently evicted
+ Default2QGhostEntries = 0.50
+)
+
+// TwoQueueCache is a thread-safe fixed size 2Q cache.
+// 2Q is an enhancement over the standard LRU cache
+// in that it tracks both frequently and recently used
+// entries separately. This avoids a burst in access to new
+// entries from evicting frequently used entries. It adds some
+// additional tracking overhead to the standard LRU cache, and is
+// computationally about 2x the cost, and adds some metadata
+// overhead. The ARCCache is similar, but does not require setting any
+// parameters.
+type TwoQueueCache struct {
+ size int
+ recentSize int
+
+ recent simplelru.LRUCache
+ frequent simplelru.LRUCache
+ recentEvict simplelru.LRUCache
+ lock sync.RWMutex
+}
+
+// New2Q creates a new TwoQueueCache using the default
+// values for the parameters.
+func New2Q(size int) (*TwoQueueCache, error) {
+ return New2QParams(size, Default2QRecentRatio, Default2QGhostEntries)
+}
+
+// New2QParams creates a new TwoQueueCache using the provided
+// parameter values.
+func New2QParams(size int, recentRatio float64, ghostRatio float64) (*TwoQueueCache, error) {
+ if size <= 0 {
+ return nil, fmt.Errorf("invalid size")
+ }
+ if recentRatio < 0.0 || recentRatio > 1.0 {
+ return nil, fmt.Errorf("invalid recent ratio")
+ }
+ if ghostRatio < 0.0 || ghostRatio > 1.0 {
+ return nil, fmt.Errorf("invalid ghost ratio")
+ }
+
+ // Determine the sub-sizes
+ recentSize := int(float64(size) * recentRatio)
+ evictSize := int(float64(size) * ghostRatio)
+
+ // Allocate the LRUs
+ recent, err := simplelru.NewLRU(size, nil)
+ if err != nil {
+ return nil, err
+ }
+ frequent, err := simplelru.NewLRU(size, nil)
+ if err != nil {
+ return nil, err
+ }
+ recentEvict, err := simplelru.NewLRU(evictSize, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ // Initialize the cache
+ c := &TwoQueueCache{
+ size: size,
+ recentSize: recentSize,
+ recent: recent,
+ frequent: frequent,
+ recentEvict: recentEvict,
+ }
+ return c, nil
+}
+
+// Get looks up a key's value from the cache.
+func (c *TwoQueueCache) Get(key interface{}) (value interface{}, ok bool) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+
+ // Check if this is a frequent value
+ if val, ok := c.frequent.Get(key); ok {
+ return val, ok
+ }
+
+ // If the value is contained in recent, then we
+ // promote it to frequent
+ if val, ok := c.recent.Peek(key); ok {
+ c.recent.Remove(key)
+ c.frequent.Add(key, val)
+ return val, ok
+ }
+
+ // No hit
+ return nil, false
+}
+
+// Add adds a value to the cache.
+func (c *TwoQueueCache) Add(key, value interface{}) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+
+ // Check if the value is frequently used already,
+ // and just update the value
+ if c.frequent.Contains(key) {
+ c.frequent.Add(key, value)
+ return
+ }
+
+ // Check if the value is recently used, and promote
+ // the value into the frequent list
+ if c.recent.Contains(key) {
+ c.recent.Remove(key)
+ c.frequent.Add(key, value)
+ return
+ }
+
+ // If the value was recently evicted, add it to the
+ // frequently used list
+ if c.recentEvict.Contains(key) {
+ c.ensureSpace(true)
+ c.recentEvict.Remove(key)
+ c.frequent.Add(key, value)
+ return
+ }
+
+ // Add to the recently seen list
+ c.ensureSpace(false)
+ c.recent.Add(key, value)
+ return
+}
+
+// ensureSpace is used to ensure we have space in the cache
+func (c *TwoQueueCache) ensureSpace(recentEvict bool) {
+ // If we have space, nothing to do
+ recentLen := c.recent.Len()
+ freqLen := c.frequent.Len()
+ if recentLen+freqLen < c.size {
+ return
+ }
+
+ // If the recent buffer is larger than
+ // the target, evict from there
+ if recentLen > 0 && (recentLen > c.recentSize || (recentLen == c.recentSize && !recentEvict)) {
+ k, _, _ := c.recent.RemoveOldest()
+ c.recentEvict.Add(k, nil)
+ return
+ }
+
+ // Remove from the frequent list otherwise
+ c.frequent.RemoveOldest()
+}
+
+// Len returns the number of items in the cache.
+func (c *TwoQueueCache) Len() int {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ return c.recent.Len() + c.frequent.Len()
+}
+
+// Keys returns a slice of the keys in the cache.
+// The frequently used keys are first in the returned slice.
+func (c *TwoQueueCache) Keys() []interface{} {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ k1 := c.frequent.Keys()
+ k2 := c.recent.Keys()
+ return append(k1, k2...)
+}
+
+// Remove removes the provided key from the cache.
+func (c *TwoQueueCache) Remove(key interface{}) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+ if c.frequent.Remove(key) {
+ return
+ }
+ if c.recent.Remove(key) {
+ return
+ }
+ if c.recentEvict.Remove(key) {
+ return
+ }
+}
+
+// Purge is used to completely clear the cache.
+func (c *TwoQueueCache) Purge() {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+ c.recent.Purge()
+ c.frequent.Purge()
+ c.recentEvict.Purge()
+}
+
+// Contains is used to check if the cache contains a key
+// without updating recency or frequency.
+func (c *TwoQueueCache) Contains(key interface{}) bool {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ return c.frequent.Contains(key) || c.recent.Contains(key)
+}
+
+// Peek is used to inspect the cache value of a key
+// without updating recency or frequency.
+func (c *TwoQueueCache) Peek(key interface{}) (value interface{}, ok bool) {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ if val, ok := c.frequent.Peek(key); ok {
+ return val, ok
+ }
+ return c.recent.Peek(key)
+}
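
A brief usage sketch of TwoQueueCache showing the recent-to-frequent promotion described above; the keys and values are arbitrary:

```go
package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	cache, err := lru.New2Q(128)
	if err != nil {
		panic(err)
	}

	cache.Add("a", 1) // lands in the "recent" LRU
	cache.Add("b", 2)

	if v, ok := cache.Get("a"); ok { // Get promotes "a" into the "frequent" LRU
		fmt.Println("a =", v)
	}
	fmt.Println(cache.Len(), cache.Contains("b"))
}
```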
diff --git a/vendor/github.com/hashicorp/golang-lru/LICENSE b/vendor/github.com/hashicorp/golang-lru/LICENSE
new file mode 100644
index 00000000..be2cc4df
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/LICENSE
@@ -0,0 +1,362 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. "Contributor"
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the terms of
+ a Secondary License.
+
+1.6. "Executable Form"
+
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+
+ means a work that combines Covered Software with other material, in a
+ separate file or files, that is not Covered Software.
+
+1.8. "License"
+
+ means this document.
+
+1.9. "Licensable"
+
+ means having the right to grant, to the maximum extent possible, whether
+ at the time of the initial grant or subsequently, any and all of the
+ rights conveyed by this License.
+
+1.10. "Modifications"
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. "Patent Claims" of a Contributor
+
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the License,
+ by the making, using, selling, offering for sale, having made, import,
+ or transfer of either its Contributions or its Contributor Version.
+
+1.12. "Secondary License"
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. "Source Code Form"
+
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, "control" means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution
+ become effective for each Contribution on the date the Contributor first
+ distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under
+ this License. No additional rights or licenses will be implied from the
+ distribution or licensing of Covered Software under this License.
+ Notwithstanding Section 2.1(b) above, no patent license is granted by a
+ Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+ This License does not grant any rights in the trademarks, service marks,
+ or logos of any Contributor (except as may be necessary to comply with
+ the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this
+ License (see Section 10.2) or under the terms of a Secondary License (if
+ permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its
+ Contributions are its original creation(s) or it has sufficient rights to
+ grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under
+ applicable copyright doctrines of fair use, fair dealing, or other
+ equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under
+ the terms of this License. You must inform recipients that the Source
+ Code Form of the Covered Software is governed by the terms of this
+ License, and how they can obtain a copy of this License. You may not
+ attempt to alter or restrict the recipients' rights in the Source Code
+ Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter the
+ recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for
+ the Covered Software. If the Larger Work is a combination of Covered
+ Software with a work governed by one or more Secondary Licenses, and the
+ Covered Software is not Incompatible With Secondary Licenses, this
+ License permits You to additionally distribute such Covered Software
+ under the terms of such Secondary License(s), so that the recipient of
+ the Larger Work may, at their option, further distribute the Covered
+ Software under the terms of either this License or such Secondary
+ License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices
+ (including copyright notices, patent notices, disclaimers of warranty, or
+ limitations of liability) contained within the Source Code Form of the
+ Covered Software, except that You may alter any license notices to the
+ extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on
+ behalf of any Contributor. You must make it absolutely clear that any
+ such warranty, support, indemnity, or liability obligation is offered by
+ You alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute,
+ judicial order, or regulation then You must: (a) comply with the terms of
+ this License to the maximum extent possible; and (b) describe the
+ limitations and the code they affect. Such description must be placed in a
+ text file included with all distributions of the Covered Software under
+ this License. Except to the extent prohibited by statute or regulation,
+ such description must be sufficiently detailed for a recipient of ordinary
+ skill to be able to understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing
+ basis, if such Contributor fails to notify You of the non-compliance by
+ some reasonable means prior to 60 days after You have come back into
+ compliance. Moreover, Your grants from a particular Contributor are
+ reinstated on an ongoing basis if such Contributor notifies You of the
+ non-compliance by some reasonable means, this is the first time You have
+ received notice of non-compliance with this License from such
+ Contributor, and You become compliant prior to 30 days after Your receipt
+ of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions,
+ counter-claims, and cross-claims) alleging that a Contributor Version
+ directly or indirectly infringes any patent, then the rights granted to
+ You by any and all Contributors for the Covered Software under Section
+ 2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an "as is" basis,
+ without warranty of any kind, either expressed, implied, or statutory,
+ including, without limitation, warranties that the Covered Software is free
+ of defects, merchantable, fit for a particular purpose or non-infringing.
+ The entire risk as to the quality and performance of the Covered Software
+ is with You. Should any Covered Software prove defective in any respect,
+ You (not any Contributor) assume the cost of any necessary servicing,
+ repair, or correction. This disclaimer of warranty constitutes an essential
+ part of this License. No use of any Covered Software is authorized under
+ this License except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from
+ such party's negligence to the extent applicable law prohibits such
+ limitation. Some jurisdictions do not allow the exclusion or limitation of
+ incidental or consequential damages, so this exclusion and limitation may
+ not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts
+ of a jurisdiction where the defendant maintains its principal place of
+ business and such litigation shall be governed by laws of that
+ jurisdiction, without reference to its conflict-of-law provisions. Nothing
+ in this Section shall prevent a party's ability to bring cross-claims or
+ counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject
+ matter hereof. If any provision of this License is held to be
+ unenforceable, such provision shall be reformed only to the extent
+ necessary to make it enforceable. Any law or regulation which provides that
+ the language of a contract shall be construed against the drafter shall not
+ be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version
+ of the License under which You originally received the Covered Software,
+ or under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a
+ modified version of this License if you rename the license and remove
+ any references to the name of the license steward (except to note that
+ such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+ Licenses If You choose to distribute Source Code Form that is
+ Incompatible With Secondary Licenses under the terms of this version of
+ the License, the notice described in Exhibit B of this License must be
+ attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
+ the Mozilla Public License, v. 2.0.
diff --git a/vendor/github.com/hashicorp/golang-lru/README.md b/vendor/github.com/hashicorp/golang-lru/README.md
new file mode 100644
index 00000000..33e58cfa
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/README.md
@@ -0,0 +1,25 @@
+golang-lru
+==========
+
+This provides the `lru` package, which implements a fixed-size
+thread-safe LRU cache. It is based on the cache in Groupcache.
+
+Documentation
+=============
+
+Full docs are available on [Godoc](http://godoc.org/github.com/hashicorp/golang-lru)
+
+Example
+=======
+
+Using the LRU is very simple:
+
+```go
+l, _ := New(128)
+for i := 0; i < 256; i++ {
+ l.Add(i, nil)
+}
+if l.Len() != 128 {
+ panic(fmt.Sprintf("bad len: %v", l.Len()))
+}
+```
diff --git a/vendor/github.com/hashicorp/golang-lru/arc.go b/vendor/github.com/hashicorp/golang-lru/arc.go
new file mode 100644
index 00000000..555225a2
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/arc.go
@@ -0,0 +1,257 @@
+package lru
+
+import (
+ "sync"
+
+ "github.com/hashicorp/golang-lru/simplelru"
+)
+
+// ARCCache is a thread-safe fixed size Adaptive Replacement Cache (ARC).
+// ARC is an enhancement over the standard LRU cache in that it tracks both
+// frequency and recency of use. This avoids a burst in access to new
+// entries from evicting the frequently used older entries. It adds some
+// additional tracking overhead to a standard LRU cache, computationally
+// it is roughly 2x the cost, and the extra memory overhead is linear
+// with the size of the cache. ARC has been patented by IBM, but is
+// similar to the TwoQueueCache (2Q) which requires setting parameters.
+type ARCCache struct {
+ size int // Size is the total capacity of the cache
+ p int // P is the dynamic preference towards T1 or T2
+
+ t1 simplelru.LRUCache // T1 is the LRU for recently accessed items
+ b1 simplelru.LRUCache // B1 is the LRU for evictions from t1
+
+ t2 simplelru.LRUCache // T2 is the LRU for frequently accessed items
+ b2 simplelru.LRUCache // B2 is the LRU for evictions from t2
+
+ lock sync.RWMutex
+}
+
+// NewARC creates an ARC of the given size
+func NewARC(size int) (*ARCCache, error) {
+ // Create the sub LRUs
+ b1, err := simplelru.NewLRU(size, nil)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := simplelru.NewLRU(size, nil)
+ if err != nil {
+ return nil, err
+ }
+ t1, err := simplelru.NewLRU(size, nil)
+ if err != nil {
+ return nil, err
+ }
+ t2, err := simplelru.NewLRU(size, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ // Initialize the ARC
+ c := &ARCCache{
+ size: size,
+ p: 0,
+ t1: t1,
+ b1: b1,
+ t2: t2,
+ b2: b2,
+ }
+ return c, nil
+}
+
+// Get looks up a key's value from the cache.
+func (c *ARCCache) Get(key interface{}) (value interface{}, ok bool) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+
+ // If the value is contained in T1 (recent), then
+ // promote it to T2 (frequent)
+ if val, ok := c.t1.Peek(key); ok {
+ c.t1.Remove(key)
+ c.t2.Add(key, val)
+ return val, ok
+ }
+
+ // Check if the value is contained in T2 (frequent)
+ if val, ok := c.t2.Get(key); ok {
+ return val, ok
+ }
+
+ // No hit
+ return nil, false
+}
+
+// Add adds a value to the cache.
+func (c *ARCCache) Add(key, value interface{}) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+
+ // Check if the value is contained in T1 (recent), and potentially
+ // promote it to frequent T2
+ if c.t1.Contains(key) {
+ c.t1.Remove(key)
+ c.t2.Add(key, value)
+ return
+ }
+
+ // Check if the value is already in T2 (frequent) and update it
+ if c.t2.Contains(key) {
+ c.t2.Add(key, value)
+ return
+ }
+
+ // Check if this value was recently evicted as part of the
+ // recently used list
+ if c.b1.Contains(key) {
+ // T1 set is too small, increase P appropriately
+ delta := 1
+ b1Len := c.b1.Len()
+ b2Len := c.b2.Len()
+ if b2Len > b1Len {
+ delta = b2Len / b1Len
+ }
+ if c.p+delta >= c.size {
+ c.p = c.size
+ } else {
+ c.p += delta
+ }
+
+ // Potentially need to make room in the cache
+ if c.t1.Len()+c.t2.Len() >= c.size {
+ c.replace(false)
+ }
+
+ // Remove from B1
+ c.b1.Remove(key)
+
+ // Add the key to the frequently used list
+ c.t2.Add(key, value)
+ return
+ }
+
+ // Check if this value was recently evicted as part of the
+ // frequently used list
+ if c.b2.Contains(key) {
+ // T2 set is too small, decrease P appropriately
+ delta := 1
+ b1Len := c.b1.Len()
+ b2Len := c.b2.Len()
+ if b1Len > b2Len {
+ delta = b1Len / b2Len
+ }
+ if delta >= c.p {
+ c.p = 0
+ } else {
+ c.p -= delta
+ }
+
+ // Potentially need to make room in the cache
+ if c.t1.Len()+c.t2.Len() >= c.size {
+ c.replace(true)
+ }
+
+ // Remove from B2
+ c.b2.Remove(key)
+
+ // Add the key to the frequently used list
+ c.t2.Add(key, value)
+ return
+ }
+
+ // Potentially need to make room in the cache
+ if c.t1.Len()+c.t2.Len() >= c.size {
+ c.replace(false)
+ }
+
+ // Keep the size of the ghost buffers trim
+ if c.b1.Len() > c.size-c.p {
+ c.b1.RemoveOldest()
+ }
+ if c.b2.Len() > c.p {
+ c.b2.RemoveOldest()
+ }
+
+ // Add to the recently seen list
+ c.t1.Add(key, value)
+ return
+}
+
+// replace is used to adaptively evict from either T1 or T2
+// based on the current learned value of P
+func (c *ARCCache) replace(b2ContainsKey bool) {
+ t1Len := c.t1.Len()
+ if t1Len > 0 && (t1Len > c.p || (t1Len == c.p && b2ContainsKey)) {
+ k, _, ok := c.t1.RemoveOldest()
+ if ok {
+ c.b1.Add(k, nil)
+ }
+ } else {
+ k, _, ok := c.t2.RemoveOldest()
+ if ok {
+ c.b2.Add(k, nil)
+ }
+ }
+}
+
+// Len returns the number of cached entries
+func (c *ARCCache) Len() int {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ return c.t1.Len() + c.t2.Len()
+}
+
+// Keys returns all the cached keys
+func (c *ARCCache) Keys() []interface{} {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ k1 := c.t1.Keys()
+ k2 := c.t2.Keys()
+ return append(k1, k2...)
+}
+
+// Remove is used to purge a key from the cache
+func (c *ARCCache) Remove(key interface{}) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+ if c.t1.Remove(key) {
+ return
+ }
+ if c.t2.Remove(key) {
+ return
+ }
+ if c.b1.Remove(key) {
+ return
+ }
+ if c.b2.Remove(key) {
+ return
+ }
+}
+
+// Purge is used to clear the cache
+func (c *ARCCache) Purge() {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+ c.t1.Purge()
+ c.t2.Purge()
+ c.b1.Purge()
+ c.b2.Purge()
+}
+
+// Contains is used to check if the cache contains a key
+// without updating recency or frequency.
+func (c *ARCCache) Contains(key interface{}) bool {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ return c.t1.Contains(key) || c.t2.Contains(key)
+}
+
+// Peek is used to inspect the cache value of a key
+// without updating recency or frequency.
+func (c *ARCCache) Peek(key interface{}) (value interface{}, ok bool) {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ if val, ok := c.t1.Peek(key); ok {
+ return val, ok
+ }
+ return c.t2.Peek(key)
+}
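To make the adaptive behaviour concrete, here is a minimal usage sketch of this ARCCache from a consumer's point of view. It is illustrative only and not code from git-bug; it assumes the file's NewARC(size int) (*ARCCache, error) constructor, whose tail is the code at the start of this hunk.

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// NewARC is assumed from the same file; only its constructor body appears above.
	cache, err := lru.NewARC(128)
	if err != nil {
		panic(err)
	}

	cache.Add("a", 1) // first touch lands in T1 (recently used)
	cache.Add("b", 2)

	// A Get hit on a T1 entry promotes it to T2 (frequently used).
	if v, ok := cache.Get("a"); ok {
		fmt.Println("a =", v)
	}

	// Contains and Peek report state without touching recency or frequency.
	fmt.Println(cache.Contains("b"), cache.Len())
}

Misses that land in the ghost lists B1 and B2 shift the target size P, which is what the branching in Add above implements.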
diff --git a/vendor/github.com/hashicorp/golang-lru/doc.go b/vendor/github.com/hashicorp/golang-lru/doc.go
new file mode 100644
index 00000000..2547df97
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/doc.go
@@ -0,0 +1,21 @@
+// Package lru provides three different LRU caches of varying sophistication.
+//
+// Cache is a simple LRU cache. It is based on the
+// LRU implementation in groupcache:
+// https://github.com/golang/groupcache/tree/master/lru
+//
+// TwoQueueCache tracks frequently used and recently used entries separately.
+// This avoids a burst of accesses from taking out frequently used entries,
+// at the cost of about 2x computational overhead and some extra bookkeeping.
+//
+// ARCCache is an adaptive replacement cache. It tracks recent evictions as
+// well as recent usage in both the frequent and recent caches. Its
+// computational overhead is comparable to TwoQueueCache, but the memory
+// overhead is linear with the size of the cache.
+//
+// ARC has been patented by IBM, so do not use it if that is problematic for
+// your program.
+//
+// All caches in this package take locks while operating, and are therefore
+// thread-safe for consumers.
+package lru
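As a rough illustration of the trade-offs described in this package comment, the sketch below constructs two of the documented cache types. It is a hypothetical snippet, not code from this repository; NewARC is assumed to exist in this package (only its body appears in the diff), and TwoQueueCache is left out because its constructor is not part of this excerpt.

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// Plain LRU: cheapest option, but a burst of one-off reads can flush it.
	simple, err := lru.New(256)
	if err != nil {
		panic(err)
	}

	// ARC: adapts between recency and frequency, tracks recent evictions,
	// and carries the IBM patent caveat mentioned above.
	arc, err := lru.NewARC(256) // assumed constructor
	if err != nil {
		panic(err)
	}

	simple.Add("k", "v")
	arc.Add("k", "v")
	fmt.Println(simple.Len(), arc.Len())
}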
diff --git a/vendor/github.com/hashicorp/golang-lru/go.mod b/vendor/github.com/hashicorp/golang-lru/go.mod
new file mode 100644
index 00000000..824cb97e
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/go.mod
@@ -0,0 +1 @@
+module github.com/hashicorp/golang-lru
diff --git a/vendor/github.com/hashicorp/golang-lru/lru.go b/vendor/github.com/hashicorp/golang-lru/lru.go
new file mode 100644
index 00000000..c8d9b0a2
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/lru.go
@@ -0,0 +1,110 @@
+package lru
+
+import (
+ "sync"
+
+ "github.com/hashicorp/golang-lru/simplelru"
+)
+
+// Cache is a thread-safe fixed size LRU cache.
+type Cache struct {
+ lru simplelru.LRUCache
+ lock sync.RWMutex
+}
+
+// New creates an LRU of the given size.
+func New(size int) (*Cache, error) {
+ return NewWithEvict(size, nil)
+}
+
+// NewWithEvict constructs a fixed size cache with the given eviction
+// callback.
+func NewWithEvict(size int, onEvicted func(key interface{}, value interface{})) (*Cache, error) {
+ lru, err := simplelru.NewLRU(size, simplelru.EvictCallback(onEvicted))
+ if err != nil {
+ return nil, err
+ }
+ c := &Cache{
+ lru: lru,
+ }
+ return c, nil
+}
+
+// Purge is used to completely clear the cache.
+func (c *Cache) Purge() {
+ c.lock.Lock()
+ c.lru.Purge()
+ c.lock.Unlock()
+}
+
+// Add adds a value to the cache. Returns true if an eviction occurred.
+func (c *Cache) Add(key, value interface{}) (evicted bool) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+ return c.lru.Add(key, value)
+}
+
+// Get looks up a key's value from the cache.
+func (c *Cache) Get(key interface{}) (value interface{}, ok bool) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+ return c.lru.Get(key)
+}
+
+// Contains checks if a key is in the cache, without updating the
+// recent-ness or deleting it for being stale.
+func (c *Cache) Contains(key interface{}) bool {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ return c.lru.Contains(key)
+}
+
+// Peek returns the key value (or undefined if not found) without updating
+// the "recently used"-ness of the key.
+func (c *Cache) Peek(key interface{}) (value interface{}, ok bool) {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ return c.lru.Peek(key)
+}
+
+// ContainsOrAdd checks if a key is in the cache without updating the
+// recent-ness or deleting it for being stale, and if not, adds the value.
+// Returns whether found and whether an eviction occurred.
+func (c *Cache) ContainsOrAdd(key, value interface{}) (ok, evicted bool) {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+
+ if c.lru.Contains(key) {
+ return true, false
+ }
+ evicted = c.lru.Add(key, value)
+ return false, evicted
+}
+
+// Remove removes the provided key from the cache.
+func (c *Cache) Remove(key interface{}) {
+ c.lock.Lock()
+ c.lru.Remove(key)
+ c.lock.Unlock()
+}
+
+// RemoveOldest removes the oldest item from the cache.
+func (c *Cache) RemoveOldest() {
+ c.lock.Lock()
+ c.lru.RemoveOldest()
+ c.lock.Unlock()
+}
+
+// Keys returns a slice of the keys in the cache, from oldest to newest.
+func (c *Cache) Keys() []interface{} {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ return c.lru.Keys()
+}
+
+// Len returns the number of items in the cache.
+func (c *Cache) Len() int {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ return c.lru.Len()
+}
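A short sketch of how this thread-safe wrapper can be exercised, covering the eviction callback from NewWithEvict and the ContainsOrAdd helper defined above; keys and sizes are invented for illustration.

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// The callback fires whenever the underlying simplelru.LRU evicts an entry.
	cache, err := lru.NewWithEvict(2, func(key, value interface{}) {
		fmt.Printf("evicted %v=%v\n", key, value)
	})
	if err != nil {
		panic(err)
	}

	cache.Add("a", 1)
	cache.Add("b", 2)
	cache.Add("c", 3) // capacity is 2, so "a" is evicted and the callback runs

	// ContainsOrAdd reports whether the key was already present and, if it
	// had to be added, whether that add evicted something.
	found, evicted := cache.ContainsOrAdd("b", 20)
	fmt.Println(found, evicted, cache.Len())
}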
diff --git a/vendor/github.com/hashicorp/golang-lru/simplelru/lru.go b/vendor/github.com/hashicorp/golang-lru/simplelru/lru.go
new file mode 100644
index 00000000..5673773b
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/simplelru/lru.go
@@ -0,0 +1,161 @@
+package simplelru
+
+import (
+ "container/list"
+ "errors"
+)
+
+// EvictCallback is used to get a callback when a cache entry is evicted
+type EvictCallback func(key interface{}, value interface{})
+
+// LRU implements a non-thread safe fixed size LRU cache
+type LRU struct {
+ size int
+ evictList *list.List
+ items map[interface{}]*list.Element
+ onEvict EvictCallback
+}
+
+// entry is used to hold a value in the evictList
+type entry struct {
+ key interface{}
+ value interface{}
+}
+
+// NewLRU constructs an LRU of the given size
+func NewLRU(size int, onEvict EvictCallback) (*LRU, error) {
+ if size <= 0 {
+ return nil, errors.New("Must provide a positive size")
+ }
+ c := &LRU{
+ size: size,
+ evictList: list.New(),
+ items: make(map[interface{}]*list.Element),
+ onEvict: onEvict,
+ }
+ return c, nil
+}
+
+// Purge is used to completely clear the cache.
+func (c *LRU) Purge() {
+ for k, v := range c.items {
+ if c.onEvict != nil {
+ c.onEvict(k, v.Value.(*entry).value)
+ }
+ delete(c.items, k)
+ }
+ c.evictList.Init()
+}
+
+// Add adds a value to the cache. Returns true if an eviction occurred.
+func (c *LRU) Add(key, value interface{}) (evicted bool) {
+ // Check for existing item
+ if ent, ok := c.items[key]; ok {
+ c.evictList.MoveToFront(ent)
+ ent.Value.(*entry).value = value
+ return false
+ }
+
+ // Add new item
+ ent := &entry{key, value}
+ entry := c.evictList.PushFront(ent)
+ c.items[key] = entry
+
+ evict := c.evictList.Len() > c.size
+ // Verify size not exceeded
+ if evict {
+ c.removeOldest()
+ }
+ return evict
+}
+
+// Get looks up a key's value from the cache.
+func (c *LRU) Get(key interface{}) (value interface{}, ok bool) {
+ if ent, ok := c.items[key]; ok {
+ c.evictList.MoveToFront(ent)
+ return ent.Value.(*entry).value, true
+ }
+ return
+}
+
+// Contains checks if a key is in the cache, without updating the recent-ness
+// or deleting it for being stale.
+func (c *LRU) Contains(key interface{}) (ok bool) {
+ _, ok = c.items[key]
+ return ok
+}
+
+// Peek returns the key value (or undefined if not found) without updating
+// the "recently used"-ness of the key.
+func (c *LRU) Peek(key interface{}) (value interface{}, ok bool) {
+ var ent *list.Element
+ if ent, ok = c.items[key]; ok {
+ return ent.Value.(*entry).value, true
+ }
+ return nil, ok
+}
+
+// Remove removes the provided key from the cache, returning if the
+// key was contained.
+func (c *LRU) Remove(key interface{}) (present bool) {
+ if ent, ok := c.items[key]; ok {
+ c.removeElement(ent)
+ return true
+ }
+ return false
+}
+
+// RemoveOldest removes the oldest item from the cache.
+func (c *LRU) RemoveOldest() (key interface{}, value interface{}, ok bool) {
+ ent := c.evictList.Back()
+ if ent != nil {
+ c.removeElement(ent)
+ kv := ent.Value.(*entry)
+ return kv.key, kv.value, true
+ }
+ return nil, nil, false
+}
+
+// GetOldest returns the oldest entry
+func (c *LRU) GetOldest() (key interface{}, value interface{}, ok bool) {
+ ent := c.evictList.Back()
+ if ent != nil {
+ kv := ent.Value.(*entry)
+ return kv.key, kv.value, true
+ }
+ return nil, nil, false
+}
+
+// Keys returns a slice of the keys in the cache, from oldest to newest.
+func (c *LRU) Keys() []interface{} {
+ keys := make([]interface{}, len(c.items))
+ i := 0
+ for ent := c.evictList.Back(); ent != nil; ent = ent.Prev() {
+ keys[i] = ent.Value.(*entry).key
+ i++
+ }
+ return keys
+}
+
+// Len returns the number of items in the cache.
+func (c *LRU) Len() int {
+ return c.evictList.Len()
+}
+
+// removeOldest removes the oldest item from the cache.
+func (c *LRU) removeOldest() {
+ ent := c.evictList.Back()
+ if ent != nil {
+ c.removeElement(ent)
+ }
+}
+
+// removeElement is used to remove a given list element from the cache
+func (c *LRU) removeElement(e *list.Element) {
+ c.evictList.Remove(e)
+ kv := e.Value.(*entry)
+ delete(c.items, kv.key)
+ if c.onEvict != nil {
+ c.onEvict(kv.key, kv.value)
+ }
+}
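Because this LRU is explicitly not thread-safe, a caller that shares it between goroutines has to add its own synchronization, which is exactly what the Cache wrapper earlier in this diff does with a sync.RWMutex. A minimal, hypothetical sketch of direct use with caller-side locking:

package main

import (
	"fmt"
	"sync"

	"github.com/hashicorp/golang-lru/simplelru"
)

func main() {
	l, err := simplelru.NewLRU(2, func(key, value interface{}) {
		fmt.Printf("evicted %v\n", key)
	})
	if err != nil {
		panic(err)
	}

	var mu sync.Mutex // caller-supplied lock; NewLRU itself does no locking
	put := func(k, v interface{}) {
		mu.Lock()
		defer mu.Unlock()
		l.Add(k, v)
	}

	put("a", 1)
	put("b", 2)
	put("c", 3) // size is 2, so "a" (the oldest) is evicted

	fmt.Println(l.Keys()) // oldest to newest: [b c]
}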
diff --git a/vendor/github.com/hashicorp/golang-lru/simplelru/lru_interface.go b/vendor/github.com/hashicorp/golang-lru/simplelru/lru_interface.go
new file mode 100644
index 00000000..74c70774
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/simplelru/lru_interface.go
@@ -0,0 +1,36 @@
+package simplelru
+
+// LRUCache is the interface for simple LRU cache.
+type LRUCache interface {
+ // Adds a value to the cache, returns true if an eviction occurred and
+ // updates the "recently used"-ness of the key.
+ Add(key, value interface{}) bool
+
+ // Returns key's value from the cache and
+ // updates the "recently used"-ness of the key. #value, isFound
+ Get(key interface{}) (value interface{}, ok bool)
+
+	// Check if a key exists in the cache without updating the recent-ness.
+ Contains(key interface{}) (ok bool)
+
+ // Returns key's value without updating the "recently used"-ness of the key.
+ Peek(key interface{}) (value interface{}, ok bool)
+
+ // Removes a key from the cache.
+ Remove(key interface{}) bool
+
+ // Removes the oldest entry from cache.
+ RemoveOldest() (interface{}, interface{}, bool)
+
+ // Returns the oldest entry from the cache. #key, value, isFound
+ GetOldest() (interface{}, interface{}, bool)
+
+ // Returns a slice of the keys in the cache, from oldest to newest.
+ Keys() []interface{}
+
+ // Returns the number of items in the cache.
+ Len() int
+
+ // Clear all cache entries
+ Purge()
+}
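One way to read this interface: callers can be written against simplelru.LRUCache instead of the concrete *LRU, so any implementation of the interface can be swapped in. The warmCache helper below is an invented name used only to illustrate that pattern.

package main

import (
	"fmt"

	"github.com/hashicorp/golang-lru/simplelru"
)

// warmCache depends only on the interface, so *LRU (or any other
// LRUCache implementation) satisfies it.
func warmCache(c simplelru.LRUCache, keys []interface{}) {
	for _, k := range keys {
		if !c.Contains(k) {
			c.Add(k, nil) // placeholder value
		}
	}
}

func main() {
	l, err := simplelru.NewLRU(8, nil) // *LRU implements LRUCache
	if err != nil {
		panic(err)
	}
	warmCache(l, []interface{}{"a", "b", "c"})
	fmt.Println(l.Len(), l.Keys())
}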
diff --git a/vendor/github.com/vektah/gqlgen/codegen/build.go b/vendor/github.com/vektah/gqlgen/codegen/build.go
deleted file mode 100644
index d56fc06f..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/build.go
+++ /dev/null
@@ -1,165 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/build"
- "go/types"
- "os"
-
- "github.com/pkg/errors"
- "golang.org/x/tools/go/loader"
-)
-
-type Build struct {
- PackageName string
- Objects Objects
- Inputs Objects
- Interfaces []*Interface
- Imports []*Import
- QueryRoot *Object
- MutationRoot *Object
- SubscriptionRoot *Object
- SchemaRaw string
-}
-
-type ModelBuild struct {
- PackageName string
- Imports []*Import
- Models []Model
- Enums []Enum
-}
-
-// Create a list of models that need to be generated
-func (cfg *Config) models() (*ModelBuild, error) {
- namedTypes := cfg.buildNamedTypes()
-
- prog, err := cfg.loadProgram(namedTypes, true)
- if err != nil {
- return nil, errors.Wrap(err, "loading failed")
- }
- imports := buildImports(namedTypes, cfg.Model.Dir())
-
- cfg.bindTypes(imports, namedTypes, cfg.Model.Dir(), prog)
-
- models, err := cfg.buildModels(namedTypes, prog)
- if err != nil {
- return nil, err
- }
- return &ModelBuild{
- PackageName: cfg.Model.Package,
- Models: models,
- Enums: cfg.buildEnums(namedTypes),
- Imports: imports.finalize(),
- }, nil
-}
-
-// bind a schema together with some code to generate a Build
-func (cfg *Config) bind() (*Build, error) {
- namedTypes := cfg.buildNamedTypes()
-
- prog, err := cfg.loadProgram(namedTypes, true)
- if err != nil {
- return nil, errors.Wrap(err, "loading failed")
- }
-
- imports := buildImports(namedTypes, cfg.Exec.Dir())
- cfg.bindTypes(imports, namedTypes, cfg.Exec.Dir(), prog)
-
- objects, err := cfg.buildObjects(namedTypes, prog, imports)
- if err != nil {
- return nil, err
- }
-
- inputs, err := cfg.buildInputs(namedTypes, prog, imports)
- if err != nil {
- return nil, err
- }
-
- b := &Build{
- PackageName: cfg.Exec.Package,
- Objects: objects,
- Interfaces: cfg.buildInterfaces(namedTypes, prog),
- Inputs: inputs,
- Imports: imports.finalize(),
- SchemaRaw: cfg.SchemaStr,
- }
-
- if qr, ok := cfg.schema.EntryPoints["query"]; ok {
- b.QueryRoot = b.Objects.ByName(qr.TypeName())
- }
-
- if mr, ok := cfg.schema.EntryPoints["mutation"]; ok {
- b.MutationRoot = b.Objects.ByName(mr.TypeName())
- }
-
- if sr, ok := cfg.schema.EntryPoints["subscription"]; ok {
- b.SubscriptionRoot = b.Objects.ByName(sr.TypeName())
- }
-
- if b.QueryRoot == nil {
- return b, fmt.Errorf("query entry point missing")
- }
-
- // Poke a few magic methods into query
- q := b.Objects.ByName(b.QueryRoot.GQLType)
- q.Fields = append(q.Fields, Field{
- Type: &Type{namedTypes["__Schema"], []string{modPtr}, nil},
- GQLName: "__schema",
- NoErr: true,
- GoMethodName: "ec.introspectSchema",
- Object: q,
- })
- q.Fields = append(q.Fields, Field{
- Type: &Type{namedTypes["__Type"], []string{modPtr}, nil},
- GQLName: "__type",
- NoErr: true,
- GoMethodName: "ec.introspectType",
- Args: []FieldArgument{
- {GQLName: "name", Type: &Type{namedTypes["String"], []string{}, nil}, Object: &Object{}},
- },
- Object: q,
- })
-
- return b, nil
-}
-
-func (cfg *Config) validate() error {
- namedTypes := cfg.buildNamedTypes()
-
- _, err := cfg.loadProgram(namedTypes, false)
- return err
-}
-
-func (cfg *Config) loadProgram(namedTypes NamedTypes, allowErrors bool) (*loader.Program, error) {
- conf := loader.Config{}
- if allowErrors {
- conf = loader.Config{
- AllowErrors: true,
- TypeChecker: types.Config{
- Error: func(e error) {},
- },
- }
- }
- for _, imp := range ambientImports {
- conf.Import(imp)
- }
-
- for _, imp := range namedTypes {
- if imp.Package != "" {
- conf.Import(imp.Package)
- }
- }
-
- return conf.Load()
-}
-
-func resolvePkg(pkgName string) (string, error) {
- cwd, _ := os.Getwd()
-
- pkg, err := build.Default.Import(pkgName, cwd, build.FindOnly)
- if err != nil {
- return "", err
- }
-
- return pkg.ImportPath, nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/codegen.go b/vendor/github.com/vektah/gqlgen/codegen/codegen.go
deleted file mode 100644
index 789ef2ec..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/codegen.go
+++ /dev/null
@@ -1,153 +0,0 @@
-package codegen
-
-import (
- "bytes"
- "fmt"
- "io/ioutil"
- "os"
- "path/filepath"
- "regexp"
- "syscall"
-
- "github.com/pkg/errors"
- "github.com/vektah/gqlgen/codegen/templates"
- "github.com/vektah/gqlgen/neelance/schema"
- "golang.org/x/tools/imports"
-)
-
-func Generate(cfg Config) error {
- if err := cfg.normalize(); err != nil {
- return err
- }
-
- _ = syscall.Unlink(cfg.Exec.Filename)
- _ = syscall.Unlink(cfg.Model.Filename)
-
- modelsBuild, err := cfg.models()
- if err != nil {
- return errors.Wrap(err, "model plan failed")
- }
- if len(modelsBuild.Models) > 0 || len(modelsBuild.Enums) > 0 {
- var buf *bytes.Buffer
- buf, err = templates.Run("models.gotpl", modelsBuild)
- if err != nil {
- return errors.Wrap(err, "model generation failed")
- }
-
- if err = write(cfg.Model.Filename, buf.Bytes()); err != nil {
- return err
- }
- for _, model := range modelsBuild.Models {
- modelCfg := cfg.Models[model.GQLType]
- modelCfg.Model = cfg.Model.ImportPath() + "." + model.GoType
- cfg.Models[model.GQLType] = modelCfg
- }
-
- for _, enum := range modelsBuild.Enums {
- modelCfg := cfg.Models[enum.GQLType]
- modelCfg.Model = cfg.Model.ImportPath() + "." + enum.GoType
- cfg.Models[enum.GQLType] = modelCfg
- }
- }
-
- build, err := cfg.bind()
- if err != nil {
- return errors.Wrap(err, "exec plan failed")
- }
-
- var buf *bytes.Buffer
- buf, err = templates.Run("generated.gotpl", build)
- if err != nil {
- return errors.Wrap(err, "exec codegen failed")
- }
-
- if err = write(cfg.Exec.Filename, buf.Bytes()); err != nil {
- return err
- }
-
- if err = cfg.validate(); err != nil {
- return errors.Wrap(err, "validation failed")
- }
-
- return nil
-}
-
-func (cfg *Config) normalize() error {
- if err := cfg.Model.normalize(); err != nil {
- return errors.Wrap(err, "model")
- }
-
- if err := cfg.Exec.normalize(); err != nil {
- return errors.Wrap(err, "exec")
- }
-
- builtins := TypeMap{
- "__Directive": {Model: "github.com/vektah/gqlgen/neelance/introspection.Directive"},
- "__Type": {Model: "github.com/vektah/gqlgen/neelance/introspection.Type"},
- "__Field": {Model: "github.com/vektah/gqlgen/neelance/introspection.Field"},
- "__EnumValue": {Model: "github.com/vektah/gqlgen/neelance/introspection.EnumValue"},
- "__InputValue": {Model: "github.com/vektah/gqlgen/neelance/introspection.InputValue"},
- "__Schema": {Model: "github.com/vektah/gqlgen/neelance/introspection.Schema"},
- "Int": {Model: "github.com/vektah/gqlgen/graphql.Int"},
- "Float": {Model: "github.com/vektah/gqlgen/graphql.Float"},
- "String": {Model: "github.com/vektah/gqlgen/graphql.String"},
- "Boolean": {Model: "github.com/vektah/gqlgen/graphql.Boolean"},
- "ID": {Model: "github.com/vektah/gqlgen/graphql.ID"},
- "Time": {Model: "github.com/vektah/gqlgen/graphql.Time"},
- "Map": {Model: "github.com/vektah/gqlgen/graphql.Map"},
- }
-
- if cfg.Models == nil {
- cfg.Models = TypeMap{}
- }
- for typeName, entry := range builtins {
- if !cfg.Models.Exists(typeName) {
- cfg.Models[typeName] = entry
- }
- }
-
- cfg.schema = schema.New()
- return cfg.schema.Parse(cfg.SchemaStr)
-}
-
-var invalidPackageNameChar = regexp.MustCompile(`[^\w]`)
-
-func sanitizePackageName(pkg string) string {
- return invalidPackageNameChar.ReplaceAllLiteralString(filepath.Base(pkg), "_")
-}
-
-func abs(path string) string {
- absPath, err := filepath.Abs(path)
- if err != nil {
- panic(err)
- }
- return filepath.ToSlash(absPath)
-}
-
-func gofmt(filename string, b []byte) ([]byte, error) {
- out, err := imports.Process(filename, b, nil)
- if err != nil {
- return b, errors.Wrap(err, "unable to gofmt")
- }
- return out, nil
-}
-
-func write(filename string, b []byte) error {
- err := os.MkdirAll(filepath.Dir(filename), 0755)
- if err != nil {
- return errors.Wrap(err, "failed to create directory")
- }
-
- formatted, err := gofmt(filename, b)
- if err != nil {
- fmt.Fprintf(os.Stderr, "gofmt failed: %s\n", err.Error())
- formatted = b
- }
-
- err = ioutil.WriteFile(filename, formatted, 0644)
- if err != nil {
- return errors.Wrapf(err, "failed to write %s", filename)
- }
-
- return nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/enum_build.go b/vendor/github.com/vektah/gqlgen/codegen/enum_build.go
deleted file mode 100644
index f2e6f63c..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/enum_build.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package codegen
-
-import (
- "sort"
- "strings"
-
- "github.com/vektah/gqlgen/codegen/templates"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-func (cfg *Config) buildEnums(types NamedTypes) []Enum {
- var enums []Enum
-
- for _, typ := range cfg.schema.Types {
- namedType := types[typ.TypeName()]
- e, isEnum := typ.(*schema.Enum)
- if !isEnum || strings.HasPrefix(typ.TypeName(), "__") || namedType.IsUserDefined {
- continue
- }
-
- var values []EnumValue
- for _, v := range e.Values {
- values = append(values, EnumValue{v.Name, v.Desc})
- }
-
- enum := Enum{
- NamedType: namedType,
- Values: values,
- }
- enum.GoType = templates.ToCamel(enum.GQLType)
- enums = append(enums, enum)
- }
-
- sort.Slice(enums, func(i, j int) bool {
- return strings.Compare(enums[i].GQLType, enums[j].GQLType) == -1
- })
-
- return enums
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/model.go b/vendor/github.com/vektah/gqlgen/codegen/model.go
deleted file mode 100644
index 164a04d5..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/model.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package codegen
-
-type Model struct {
- *NamedType
-
- Fields []ModelField
-}
-
-type ModelField struct {
- *Type
- GQLName string
- GoVarName string
- GoFKName string
- GoFKType string
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/object.go b/vendor/github.com/vektah/gqlgen/codegen/object.go
deleted file mode 100644
index 1c03c0ba..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/object.go
+++ /dev/null
@@ -1,206 +0,0 @@
-package codegen
-
-import (
- "bytes"
- "fmt"
- "strconv"
- "strings"
- "text/template"
- "unicode"
-)
-
-type Object struct {
- *NamedType
-
- Fields []Field
- Satisfies []string
- Root bool
- DisableConcurrency bool
- Stream bool
-}
-
-type Field struct {
- *Type
-
- GQLName string // The name of the field in graphql
- GoMethodName string // The name of the method in go, if any
- GoVarName string // The name of the var in go, if any
- Args []FieldArgument // A list of arguments to be passed to this field
- ForceResolver bool // Should be emit Resolver method
- NoErr bool // If this is bound to a go method, does that method have an error as the second argument
- Object *Object // A link back to the parent object
- Default interface{} // The default value
-}
-
-type FieldArgument struct {
- *Type
-
- GQLName string // The name of the argument in graphql
- GoVarName string // The name of the var in go
- Object *Object // A link back to the parent object
- Default interface{} // The default value
-}
-
-type Objects []*Object
-
-func (o *Object) Implementors() string {
- satisfiedBy := strconv.Quote(o.GQLType)
- for _, s := range o.Satisfies {
- satisfiedBy += ", " + strconv.Quote(s)
- }
- return "[]string{" + satisfiedBy + "}"
-}
-
-func (o *Object) HasResolvers() bool {
- for _, f := range o.Fields {
- if f.IsResolver() {
- return true
- }
- }
- return false
-}
-
-func (f *Field) IsResolver() bool {
- return f.ForceResolver || f.GoMethodName == "" && f.GoVarName == ""
-}
-
-func (f *Field) IsConcurrent() bool {
- return f.IsResolver() && !f.Object.DisableConcurrency
-}
-func (f *Field) ShortInvocation() string {
- if !f.IsResolver() {
- return ""
- }
- shortName := strings.ToUpper(f.GQLName[:1]) + f.GQLName[1:]
- res := fmt.Sprintf("%s().%s(ctx", f.Object.GQLType, shortName)
- if !f.Object.Root {
- res += fmt.Sprintf(", obj")
- }
- for _, arg := range f.Args {
- res += fmt.Sprintf(", %s", arg.GoVarName)
- }
- res += ")"
- return res
-}
-func (f *Field) ShortResolverDeclaration() string {
- if !f.IsResolver() {
- return ""
- }
- decl := strings.TrimPrefix(f.ResolverDeclaration(), f.Object.GQLType+"_")
- return strings.ToUpper(decl[:1]) + decl[1:]
-}
-
-func (f *Field) ResolverDeclaration() string {
- if !f.IsResolver() {
- return ""
- }
- res := fmt.Sprintf("%s_%s(ctx context.Context", f.Object.GQLType, f.GQLName)
-
- if !f.Object.Root {
- res += fmt.Sprintf(", obj *%s", f.Object.FullName())
- }
- for _, arg := range f.Args {
- res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
- }
-
- result := f.Signature()
- if f.Object.Stream {
- result = "<-chan " + result
- }
-
- res += fmt.Sprintf(") (%s, error)", result)
- return res
-}
-
-func (f *Field) CallArgs() string {
- var args []string
-
- if f.GoMethodName == "" {
- args = append(args, "ctx")
-
- if !f.Object.Root {
- args = append(args, "obj")
- }
- }
-
- for _, arg := range f.Args {
- args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
- }
-
- return strings.Join(args, ", ")
-}
-
-// should be in the template, but its recursive and has a bunch of args
-func (f *Field) WriteJson() string {
- return f.doWriteJson("res", f.Type.Modifiers, false, 1)
-}
-
-func (f *Field) doWriteJson(val string, remainingMods []string, isPtr bool, depth int) string {
- switch {
- case len(remainingMods) > 0 && remainingMods[0] == modPtr:
- return fmt.Sprintf("if %s == nil { return graphql.Null }\n%s", val, f.doWriteJson(val, remainingMods[1:], true, depth+1))
-
- case len(remainingMods) > 0 && remainingMods[0] == modList:
- if isPtr {
- val = "*" + val
- }
- var arr = "arr" + strconv.Itoa(depth)
- var index = "idx" + strconv.Itoa(depth)
-
- return tpl(`{{.arr}} := graphql.Array{}
- for {{.index}} := range {{.val}} {
- {{.arr}} = append({{.arr}}, func() graphql.Marshaler {
- rctx := graphql.GetResolverContext(ctx)
- rctx.PushIndex({{.index}})
- defer rctx.Pop()
- {{ .next }}
- }())
- }
- return {{.arr}}`, map[string]interface{}{
- "val": val,
- "arr": arr,
- "index": index,
- "next": f.doWriteJson(val+"["+index+"]", remainingMods[1:], false, depth+1),
- })
-
- case f.IsScalar:
- if isPtr {
- val = "*" + val
- }
- return f.Marshal(val)
-
- default:
- if !isPtr {
- val = "&" + val
- }
- return fmt.Sprintf("return ec._%s(ctx, field.Selections, %s)", f.GQLType, val)
- }
-}
-
-func (os Objects) ByName(name string) *Object {
- for i, o := range os {
- if strings.EqualFold(o.GQLType, name) {
- return os[i]
- }
- }
- return nil
-}
-
-func tpl(tpl string, vars map[string]interface{}) string {
- b := &bytes.Buffer{}
- err := template.Must(template.New("inline").Parse(tpl)).Execute(b, vars)
- if err != nil {
- panic(err)
- }
- return b.String()
-}
-
-func ucFirst(s string) string {
- if s == "" {
- return ""
- }
-
- r := []rune(s)
- r[0] = unicode.ToUpper(r[0])
- return string(r)
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/object_build.go b/vendor/github.com/vektah/gqlgen/codegen/object_build.go
deleted file mode 100644
index 0ef40fef..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/object_build.go
+++ /dev/null
@@ -1,144 +0,0 @@
-package codegen
-
-import (
- "log"
- "sort"
- "strings"
-
- "github.com/pkg/errors"
- "github.com/vektah/gqlgen/neelance/schema"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildObjects(types NamedTypes, prog *loader.Program, imports *Imports) (Objects, error) {
- var objects Objects
-
- for _, typ := range cfg.schema.Types {
- switch typ := typ.(type) {
- case *schema.Object:
- obj, err := cfg.buildObject(types, typ)
- if err != nil {
- return nil, err
- }
-
- def, err := findGoType(prog, obj.Package, obj.GoType)
- if err != nil {
- return nil, err
- }
- if def != nil {
- for _, bindErr := range bindObject(def.Type(), obj, imports) {
- log.Println(bindErr.Error())
- log.Println(" Adding resolver method")
- }
- }
-
- objects = append(objects, obj)
- }
- }
-
- sort.Slice(objects, func(i, j int) bool {
- return strings.Compare(objects[i].GQLType, objects[j].GQLType) == -1
- })
-
- return objects, nil
-}
-
-var keywords = []string{
- "break",
- "default",
- "func",
- "interface",
- "select",
- "case",
- "defer",
- "go",
- "map",
- "struct",
- "chan",
- "else",
- "goto",
- "package",
- "switch",
- "const",
- "fallthrough",
- "if",
- "range",
- "type",
- "continue",
- "for",
- "import",
- "return",
- "var",
-}
-
-func sanitizeGoName(name string) string {
- for _, k := range keywords {
- if name == k {
- return name + "_"
- }
- }
- return name
-}
-
-func (cfg *Config) buildObject(types NamedTypes, typ *schema.Object) (*Object, error) {
- obj := &Object{NamedType: types[typ.TypeName()]}
- typeEntry, entryExists := cfg.Models[typ.TypeName()]
-
- for _, i := range typ.Interfaces {
- obj.Satisfies = append(obj.Satisfies, i.Name)
- }
-
- for _, field := range typ.Fields {
-
- var forceResolver bool
- if entryExists {
- if typeField, ok := typeEntry.Fields[field.Name]; ok {
- forceResolver = typeField.Resolver
- }
- }
-
- var args []FieldArgument
- for _, arg := range field.Args {
- newArg := FieldArgument{
- GQLName: arg.Name.Name,
- Type: types.getType(arg.Type),
- Object: obj,
- GoVarName: sanitizeGoName(arg.Name.Name),
- }
-
- if !newArg.Type.IsInput && !newArg.Type.IsScalar {
- return nil, errors.Errorf("%s cannot be used as argument of %s.%s. only input and scalar types are allowed", arg.Type, obj.GQLType, field.Name)
- }
-
- if arg.Default != nil {
- newArg.Default = arg.Default.Value(nil)
- newArg.StripPtr()
- }
- args = append(args, newArg)
- }
-
- obj.Fields = append(obj.Fields, Field{
- GQLName: field.Name,
- Type: types.getType(field.Type),
- Args: args,
- Object: obj,
- ForceResolver: forceResolver,
- })
- }
-
- for name, typ := range cfg.schema.EntryPoints {
- schemaObj := typ.(*schema.Object)
- if schemaObj.TypeName() != obj.GQLType {
- continue
- }
-
- obj.Root = true
- if name == "mutation" {
- obj.DisableConcurrency = true
- }
- if name == "subscription" {
- obj.Stream = true
- }
- }
- return obj, nil
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/args.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/args.gotpl
deleted file mode 100644
index f53aceec..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/args.gotpl
+++ /dev/null
@@ -1,30 +0,0 @@
- {{- if . }}args := map[string]interface{}{} {{end}}
- {{- range $i, $arg := . }}
- var arg{{$i}} {{$arg.Signature }}
- if tmp, ok := field.Args[{{$arg.GQLName|quote}}]; ok {
- var err error
- {{$arg.Unmarshal (print "arg" $i) "tmp" }}
- if err != nil {
- ec.Error(ctx, err)
- {{- if $arg.Object.Stream }}
- return nil
- {{- else }}
- return graphql.Null
- {{- end }}
- }
- } {{ if $arg.Default }} else {
- var tmp interface{} = {{ $arg.Default | dump }}
- var err error
- {{$arg.Unmarshal (print "arg" $i) "tmp" }}
- if err != nil {
- ec.Error(ctx, err)
- {{- if $arg.Object.Stream }}
- return nil
- {{- else }}
- return graphql.Null
- {{- end }}
- }
- }
- {{end }}
- args[{{$arg.GQLName|quote}}] = arg{{$i}}
- {{- end -}}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/data.go b/vendor/github.com/vektah/gqlgen/codegen/templates/data.go
deleted file mode 100644
index d6da4807..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/data.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package templates
-
-var data = map[string]string{
- "args.gotpl": "\t{{- if . }}args := map[string]interface{}{} {{end}}\n\t{{- range $i, $arg := . }}\n\t\tvar arg{{$i}} {{$arg.Signature }}\n\t\tif tmp, ok := field.Args[{{$arg.GQLName|quote}}]; ok {\n\t\t\tvar err error\n\t\t\t{{$arg.Unmarshal (print \"arg\" $i) \"tmp\" }}\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\t{{- if $arg.Object.Stream }}\n\t\t\t\t\treturn nil\n\t\t\t\t{{- else }}\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t{{- end }}\n\t\t\t}\n\t\t} {{ if $arg.Default }} else {\n\t\t\tvar tmp interface{} = {{ $arg.Default | dump }}\n\t\t\tvar err error\n\t\t\t{{$arg.Unmarshal (print \"arg\" $i) \"tmp\" }}\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\t{{- if $arg.Object.Stream }}\n\t\t\t\t\treturn nil\n\t\t\t\t{{- else }}\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t{{- end }}\n\t\t\t}\n\t\t}\n\t\t{{end }}\n\t\targs[{{$arg.GQLName|quote}}] = arg{{$i}}\n\t{{- end -}}\n",
- "field.gotpl": "{{ $field := . }}\n{{ $object := $field.Object }}\n\n{{- if $object.Stream }}\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {\n\t\t{{- template \"args.gotpl\" $field.Args }}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{Field: field})\n\t\tresults, err := ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})\n\t\tif err != nil {\n\t\t\tec.Error(ctx, err)\n\t\t\treturn nil\n\t\t}\n\t\treturn func() graphql.Marshaler {\n\t\t\tres, ok := <-results\n\t\t\tif !ok {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tvar out graphql.OrderedMap\n\t\t\tout.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())\n\t\t\treturn &out\n\t\t}\n\t}\n{{ else }}\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {\n\t\t{{- template \"args.gotpl\" $field.Args }}\n\n\t\t{{- if $field.IsConcurrent }}\n\t\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\t\tObject: {{$object.GQLType|quote}},\n\t\t\t\tArgs: {{if $field.Args }}args{{else}}nil{{end}},\n\t\t\t\tField: field,\n\t\t\t})\n\t\t\treturn graphql.Defer(func() (ret graphql.Marshaler) {\n\t\t\t\tdefer func() {\n\t\t\t\t\tif r := recover(); r != nil {\n\t\t\t\t\t\tuserErr := ec.Recover(ctx, r)\n\t\t\t\t\t\tec.Error(ctx, userErr)\n\t\t\t\t\t\tret = graphql.Null\n\t\t\t\t\t}\n\t\t\t\t}()\n\t\t{{ else }}\n\t\t\trctx := graphql.GetResolverContext(ctx)\n\t\t\trctx.Object = {{$object.GQLType|quote}}\n\t\t\trctx.Args = {{if $field.Args }}args{{else}}nil{{end}}\n\t\t\trctx.Field = field\n\t\t\trctx.PushField(field.Alias)\n\t\t\tdefer rctx.Pop()\n\t\t{{- end }}\n\n\t\t\t{{- if $field.IsResolver }}\n\t\t\t\tresTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {\n\t\t\t\t\treturn ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\tec.Error(ctx, err)\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t}\n\t\t\t\tif resTmp == nil {\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t}\n\t\t\t\tres := resTmp.({{$field.Signature}})\n\t\t\t{{- else if $field.GoVarName }}\n\t\t\t\tres := obj.{{$field.GoVarName}}\n\t\t\t{{- else if $field.GoMethodName }}\n\t\t\t\t{{- if $field.NoErr }}\n\t\t\t\t\tres := {{$field.GoMethodName}}({{ $field.CallArgs }})\n\t\t\t\t{{- else }}\n\t\t\t\t\tres, err := {{$field.GoMethodName}}({{ $field.CallArgs }})\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tec.Error(ctx, err)\n\t\t\t\t\t\treturn graphql.Null\n\t\t\t\t\t}\n\t\t\t\t{{- end }}\n\t\t\t{{- end }}\n\t\t\t{{ $field.WriteJson }}\n\t\t{{- if $field.IsConcurrent }}\n\t\t\t})\n\t\t{{- end }}\n\t}\n{{ end }}\n",
-	"generated.gotpl": "// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\n// MakeExecutableSchema creates an ExecutableSchema from the Resolvers interface.\nfunc MakeExecutableSchema(resolvers Resolvers) graphql.ExecutableSchema {\n\treturn &executableSchema{resolvers: resolvers}\n}\n\n// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.\nfunc NewExecutableSchema(resolvers ResolverRoot) graphql.ExecutableSchema {\n\treturn MakeExecutableSchema(shortMapper{r: resolvers})\n}\n\ntype Resolvers interface {\n{{- range $object := .Objects -}}\n\t{{ range $field := $object.Fields -}}\n\t\t{{ $field.ResolverDeclaration }}\n\t{{ end }}\n{{- end }}\n}\n\ntype ResolverRoot interface {\n{{- range $object := .Objects -}}\n\t{{ if $object.HasResolvers -}}\n\t\t{{$object.GQLType}}() {{$object.GQLType}}Resolver\n\t{{ end }}\n{{- end }}\n}\n\n{{- range $object := .Objects -}}\n\t{{ if $object.HasResolvers }}\n\t\ttype {{$object.GQLType}}Resolver interface {\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{ $field.ShortResolverDeclaration }}\n\t\t{{ end }}\n\t\t}\n\t{{- end }}\n{{- end }}\n\ntype shortMapper struct {\n\tr ResolverRoot\n}\n\n{{- range $object := .Objects -}}\n\t{{ range $field := $object.Fields -}}\n\t\t{{- if $field.IsResolver }}\n\t\t\tfunc (s shortMapper) {{ $field.ResolverDeclaration }} {\n\t\t\t\treturn s.r.{{$field.ShortInvocation}}\n\t\t\t}\n\t\t{{- end }}\n\t{{ end }}\n{{- end }}\n\ntype executableSchema struct {\n\tresolvers Resolvers\n}\n\nfunc (e *executableSchema) Schema() *schema.Schema {\n\treturn parsedSchema\n}\n\nfunc (e *executableSchema) Query(ctx context.Context, op *query.Operation) *graphql.Response {\n\t{{- if .QueryRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.QueryRoot.GQLType}}(ctx, op.Selections)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"queries are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {\n\t{{- if .MutationRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.MutationRoot.GQLType}}(ctx, op.Selections)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"mutations are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {\n\t{{- if .SubscriptionRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tnext := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.Selections)\n\t\tif ec.Errors != nil {\n\t\t\treturn graphql.OneShot(&graphql.Response{Data: []byte(\"null\"), Errors: ec.Errors})\n\t\t}\n\n\t\tvar buf bytes.Buffer\n\t\treturn func() *graphql.Response {\n\t\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\t\tbuf.Reset()\n\t\t\t\tdata := next()\n\n\t\t\t\tif data == nil {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\tdata.MarshalGQL(&buf)\n\t\t\t\treturn buf.Bytes()\n\t\t\t})\n\n\t\t\treturn &graphql.Response{\n\t\t\t\tData: buf,\n\t\t\t\tErrors: ec.Errors,\n\t\t\t}\n\t\t}\n\t{{- else }}\n\t\treturn graphql.OneShot(graphql.ErrorResponse(ctx, \"subscriptions are not supported\"))\n\t{{- end }}\n}\n\ntype executionContext struct {\n\t*graphql.RequestContext\n\n\tresolvers Resolvers\n}\n\n{{- range $object := .Objects }}\n\t{{ template \"object.gotpl\" $object }}\n\n\t{{- range $field := $object.Fields }}\n\t\t{{ template \"field.gotpl\" $field }}\n\t{{ end }}\n{{- end}}\n\n{{- range $interface := .Interfaces }}\n\t{{ template \"interface.gotpl\" $interface }}\n{{- end }}\n\n{{- range $input := .Inputs }}\n\t{{ template \"input.gotpl\" $input }}\n{{- end }}\n\nfunc (ec *executionContext) introspectSchema() *introspection.Schema {\n\treturn introspection.WrapSchema(parsedSchema)\n}\n\nfunc (ec *executionContext) introspectType(name string) *introspection.Type {\n\tt := parsedSchema.Resolve(name)\n\tif t == nil {\n\t\treturn nil\n\t}\n\treturn introspection.WrapType(t)\n}\n\nvar parsedSchema = schema.MustParse({{.SchemaRaw|rawQuote}})\n",
- "input.gotpl": "\t{{- if .IsMarshaled }}\n\tfunc Unmarshal{{ .GQLType }}(v interface{}) ({{.FullName}}, error) {\n\t\tvar it {{.FullName}}\n\t\tvar asMap = v.(map[string]interface{})\n\t\t{{ range $field := .Fields}}\n\t\t\t{{- if $field.Default}}\n\t\t\t\tif _, present := asMap[{{$field.GQLName|quote}}] ; !present {\n\t\t\t\t\tasMap[{{$field.GQLName|quote}}] = {{ $field.Default | dump }}\n\t\t\t\t}\n\t\t\t{{- end}}\n\t\t{{- end }}\n\n\t\tfor k, v := range asMap {\n\t\t\tswitch k {\n\t\t\t{{- range $field := .Fields }}\n\t\t\tcase {{$field.GQLName|quote}}:\n\t\t\t\tvar err error\n\t\t\t\t{{ $field.Unmarshal (print \"it.\" $field.GoVarName) \"v\" }}\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn it, err\n\t\t\t\t}\n\t\t\t{{- end }}\n\t\t\t}\n\t\t}\n\n\t\treturn it, nil\n\t}\n\t{{- end }}\n",
- "interface.gotpl": "{{- $interface := . }}\n\nfunc (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel []query.Selection, obj *{{$interface.FullName}}) graphql.Marshaler {\n\tswitch obj := (*obj).(type) {\n\tcase nil:\n\t\treturn graphql.Null\n\t{{- range $implementor := $interface.Implementors }}\n\t\t{{- if $implementor.ValueReceiver }}\n\t\t\tcase {{$implementor.FullName}}:\n\t\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, &obj)\n\t\t{{- end}}\n\t\tcase *{{$implementor.FullName}}:\n\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, obj)\n\t{{- end }}\n\tdefault:\n\t\tpanic(fmt.Errorf(\"unexpected type %T\", obj))\n\t}\n}\n",
- "models.gotpl": "// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\n{{ range $model := .Models }}\n\t{{- if .IsInterface }}\n\t\ttype {{.GoType}} interface {}\n\t{{- else }}\n\t\ttype {{.GoType}} struct {\n\t\t\t{{- range $field := .Fields }}\n\t\t\t\t{{- if $field.GoVarName }}\n\t\t\t\t\t{{ $field.GoVarName }} {{$field.Signature}} `json:\"{{$field.GQLName}}\"`\n\t\t\t\t{{- else }}\n\t\t\t\t\t{{ $field.GoFKName }} {{$field.GoFKType}}\n\t\t\t\t{{- end }}\n\t\t\t{{- end }}\n\t\t}\n\t{{- end }}\n{{- end}}\n\n{{ range $enum := .Enums }}\n\ttype {{.GoType}} string\n\tconst (\n\t{{ range $value := .Values -}}\n\t\t{{with .Description}} {{.|prefixLines \"// \"}} {{end}}\n\t\t{{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}\n\t{{- end }}\n\t)\n\n\tfunc (e {{.GoType}}) IsValid() bool {\n\t\tswitch e {\n\t\tcase {{ range $index, $element := .Values}}{{if $index}},{{end}}{{ $enum.GoType }}{{ $element.Name|toCamel }}{{end}}:\n\t\t\treturn true\n\t\t}\n\t\treturn false\n\t}\n\n\tfunc (e {{.GoType}}) String() string {\n\t\treturn string(e)\n\t}\n\n\tfunc (e *{{.GoType}}) UnmarshalGQL(v interface{}) error {\n\t\tstr, ok := v.(string)\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"enums must be strings\")\n\t\t}\n\n\t\t*e = {{.GoType}}(str)\n\t\tif !e.IsValid() {\n\t\t\treturn fmt.Errorf(\"%s is not a valid {{.GQLType}}\", str)\n\t\t}\n\t\treturn nil\n\t}\n\n\tfunc (e {{.GoType}}) MarshalGQL(w io.Writer) {\n\t\tfmt.Fprint(w, strconv.Quote(e.String()))\n\t}\n\n{{- end }}\n",
- "object.gotpl": "{{ $object := . }}\n\nvar {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}\n\n// nolint: gocyclo, errcheck, gas, goconst\n{{- if .Stream }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection) func() graphql.Marshaler {\n\tfields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)\n\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\tObject: {{$object.GQLType|quote}},\n\t})\n\tif len(fields) != 1 {\n\t\tec.Errorf(ctx, \"must subscribe to exactly one stream\")\n\t\treturn nil\n\t}\n\n\tswitch fields[0].Name {\n\t{{- range $field := $object.Fields }}\n\tcase \"{{$field.GQLName}}\":\n\t\treturn ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, fields[0])\n\t{{- end }}\n\tdefault:\n\t\tpanic(\"unknown field \" + strconv.Quote(fields[0].Name))\n\t}\n}\n{{- else }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {\n\tfields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)\n\t{{if $object.Root}}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\tObject: {{$object.GQLType|quote}},\n\t\t})\n\t{{end}}\n\tout := graphql.NewOrderedMap(len(fields))\n\tfor i, field := range fields {\n\t\tout.Keys[i] = field.Alias\n\n\t\tswitch field.Name {\n\t\tcase \"__typename\":\n\t\t\tout.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})\n\t\t{{- range $field := $object.Fields }}\n\t\tcase \"{{$field.GQLName}}\":\n\t\t\tout.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})\n\t\t{{- end }}\n\t\tdefault:\n\t\t\tpanic(\"unknown field \" + strconv.Quote(field.Name))\n\t\t}\n\t}\n\n\treturn out\n}\n{{- end }}\n",
-}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/field.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/field.gotpl
deleted file mode 100644
index 4279ad8e..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/field.gotpl
+++ /dev/null
@@ -1,80 +0,0 @@
-{{ $field := . }}
-{{ $object := $field.Object }}
-
-{{- if $object.Stream }}
- func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {
- {{- template "args.gotpl" $field.Args }}
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{Field: field})
- results, err := ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})
- if err != nil {
- ec.Error(ctx, err)
- return nil
- }
- return func() graphql.Marshaler {
- res, ok := <-results
- if !ok {
- return nil
- }
- var out graphql.OrderedMap
- out.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())
- return &out
- }
- }
-{{ else }}
- func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {
- {{- template "args.gotpl" $field.Args }}
-
- {{- if $field.IsConcurrent }}
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Object: {{$object.GQLType|quote}},
- Args: {{if $field.Args }}args{{else}}nil{{end}},
- Field: field,
- })
- return graphql.Defer(func() (ret graphql.Marshaler) {
- defer func() {
- if r := recover(); r != nil {
- userErr := ec.Recover(ctx, r)
- ec.Error(ctx, userErr)
- ret = graphql.Null
- }
- }()
- {{ else }}
- rctx := graphql.GetResolverContext(ctx)
- rctx.Object = {{$object.GQLType|quote}}
- rctx.Args = {{if $field.Args }}args{{else}}nil{{end}}
- rctx.Field = field
- rctx.PushField(field.Alias)
- defer rctx.Pop()
- {{- end }}
-
- {{- if $field.IsResolver }}
- resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
- return ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})
- })
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- if resTmp == nil {
- return graphql.Null
- }
- res := resTmp.({{$field.Signature}})
- {{- else if $field.GoVarName }}
- res := obj.{{$field.GoVarName}}
- {{- else if $field.GoMethodName }}
- {{- if $field.NoErr }}
- res := {{$field.GoMethodName}}({{ $field.CallArgs }})
- {{- else }}
- res, err := {{$field.GoMethodName}}({{ $field.CallArgs }})
- if err != nil {
- ec.Error(ctx, err)
- return graphql.Null
- }
- {{- end }}
- {{- end }}
- {{ $field.WriteJson }}
- {{- if $field.IsConcurrent }}
- })
- {{- end }}
- }
-{{ end }}
diff --git a/vendor/github.com/vektah/gqlgen/codegen/templates/generated.gotpl b/vendor/github.com/vektah/gqlgen/codegen/templates/generated.gotpl
deleted file mode 100644
index cc1dc459..00000000
--- a/vendor/github.com/vektah/gqlgen/codegen/templates/generated.gotpl
+++ /dev/null
@@ -1,175 +0,0 @@
-// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.
-
-package {{ .PackageName }}
-
-import (
-{{- range $import := .Imports }}
- {{- $import.Write }}
-{{ end }}
-)
-
-// MakeExecutableSchema creates an ExecutableSchema from the Resolvers interface.
-func MakeExecutableSchema(resolvers Resolvers) graphql.ExecutableSchema {
- return &executableSchema{resolvers: resolvers}
-}
-
-// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
-func NewExecutableSchema(resolvers ResolverRoot) graphql.ExecutableSchema {
- return MakeExecutableSchema(shortMapper{r: resolvers})
-}
-
-type Resolvers interface {
-{{- range $object := .Objects -}}
- {{ range $field := $object.Fields -}}
- {{ $field.ResolverDeclaration }}
- {{ end }}
-{{- end }}
-}
-
-type ResolverRoot interface {
-{{- range $object := .Objects -}}
- {{ if $object.HasResolvers -}}
- {{$object.GQLType}}() {{$object.GQLType}}Resolver
- {{ end }}
-{{- end }}
-}
-
-{{- range $object := .Objects -}}
- {{ if $object.HasResolvers }}
- type {{$object.GQLType}}Resolver interface {
- {{ range $field := $object.Fields -}}
- {{ $field.ShortResolverDeclaration }}
- {{ end }}
- }
- {{- end }}
-{{- end }}
-
-type shortMapper struct {
- r ResolverRoot
-}
-
-{{- range $object := .Objects -}}
- {{ range $field := $object.Fields -}}
- {{- if $field.IsResolver }}
- func (s shortMapper) {{ $field.ResolverDeclaration }} {
- return s.r.{{$field.ShortInvocation}}
- }
- {{- end }}
- {{ end }}
-{{- end }}
-
-type executableSchema struct {
- resolvers Resolvers
-}
-
-func (e *executableSchema) Schema() *schema.Schema {
- return parsedSchema
-}
-
-func (e *executableSchema) Query(ctx context.Context, op *query.Operation) *graphql.Response {
- {{- if .QueryRoot }}
- ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
-
- buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- data := ec._{{.QueryRoot.GQLType}}(ctx, op.Selections)
- var buf bytes.Buffer
- data.MarshalGQL(&buf)
- return buf.Bytes()
- })
-
- return &graphql.Response{
- Data: buf,
- Errors: ec.Errors,
- }
- {{- else }}
- return graphql.ErrorResponse(ctx, "queries are not supported")
- {{- end }}
-}
-
-func (e *executableSchema) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {
- {{- if .MutationRoot }}
- ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
-
- buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- data := ec._{{.MutationRoot.GQLType}}(ctx, op.Selections)
- var buf bytes.Buffer
- data.MarshalGQL(&buf)
- return buf.Bytes()
- })
-
- return &graphql.Response{
- Data: buf,
- Errors: ec.Errors,
- }
- {{- else }}
- return graphql.ErrorResponse(ctx, "mutations are not supported")
- {{- end }}
-}
-
-func (e *executableSchema) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {
- {{- if .SubscriptionRoot }}
- ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
-
- next := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.Selections)
- if ec.Errors != nil {
- return graphql.OneShot(&graphql.Response{Data: []byte("null"), Errors: ec.Errors})
- }
-
- var buf bytes.Buffer
- return func() *graphql.Response {
- buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- buf.Reset()
- data := next()
-
- if data == nil {
- return nil
- }
- data.MarshalGQL(&buf)
- return buf.Bytes()
- })
-
- return &graphql.Response{
- Data: buf,
- Errors: ec.Errors,
- }
- }
- {{- else }}
- return graphql.OneShot(graphql.ErrorResponse(ctx, "subscriptions are not supported"))
- {{- end }}
-}
-
-type executionContext struct {
- *graphql.RequestContext
-
- resolvers Resolvers
-}
-
-{{- range $object := .Objects }}
- {{ template "object.gotpl" $object }}
-
- {{- range $field := $object.Fields }}
- {{ template "field.gotpl" $field }}
- {{ end }}
-{{- end}}
-
-{{- range $interface := .Interfaces }}
- {{ template "interface.gotpl" $interface }}
-{{- end }}
-
-{{- range $input := .Inputs }}
- {{ template "input.gotpl" $input }}
-{{- end }}
-
-func (ec *executionContext) introspectSchema() *introspection.Schema {
- return introspection.WrapSchema(parsedSchema)
-}
-
-func (ec *executionContext) introspectType(name string) *introspection.Type {
- t := parsedSchema.Resolve(name)
- if t == nil {
- return nil
- }
- return introspection.WrapType(t)
-}
-
-var parsedSchema = schema.MustParse({{.SchemaRaw|rawQuote}})
diff --git a/vendor/github.com/vektah/gqlgen/graphql/defer.go b/vendor/github.com/vektah/gqlgen/graphql/defer.go
deleted file mode 100644
index 79346a84..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/defer.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package graphql
-
-import (
- "io"
- "sync"
-)
-
-// Defer will begin executing the given function and immediately return a result that will block until the function completes
-func Defer(f func() Marshaler) Marshaler {
- var deferred deferred
- deferred.mu.Lock()
-
- go func() {
- deferred.result = f()
- deferred.mu.Unlock()
- }()
-
- return &deferred
-}
-
-type deferred struct {
- result Marshaler
- mu sync.Mutex
-}
-
-func (d *deferred) MarshalGQL(w io.Writer) {
- d.mu.Lock()
- d.result.MarshalGQL(w)
- d.mu.Unlock()
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/error.go b/vendor/github.com/vektah/gqlgen/graphql/error.go
deleted file mode 100644
index 15e65fab..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/error.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package graphql
-
-import (
- "context"
-)
-
-// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors
-type Error struct {
- Message string `json:"message"`
- Path []interface{} `json:"path,omitempty"`
- Locations []ErrorLocation `json:"locations,omitempty"`
- Extensions map[string]interface{} `json:"extensions,omitempty"`
-}
-
-func (e *Error) Error() string {
- return e.Message
-}
-
-type ErrorLocation struct {
- Line int `json:"line,omitempty"`
- Column int `json:"column,omitempty"`
-}
-
-type ErrorPresenterFunc func(context.Context, error) *Error
-
-type ExtendedError interface {
- Extensions() map[string]interface{}
-}
-
-func DefaultErrorPresenter(ctx context.Context, err error) *Error {
- if gqlerr, ok := err.(*Error); ok {
- gqlerr.Path = GetResolverContext(ctx).Path
- return gqlerr
- }
-
- var extensions map[string]interface{}
- if ee, ok := err.(ExtendedError); ok {
- extensions = ee.Extensions()
- }
-
- return &Error{
- Message: err.Error(),
- Path: GetResolverContext(ctx).Path,
- Extensions: extensions,
- }
-}
diff --git a/vendor/github.com/vektah/gqlgen/graphql/exec.go b/vendor/github.com/vektah/gqlgen/graphql/exec.go
deleted file mode 100644
index 2c034888..00000000
--- a/vendor/github.com/vektah/gqlgen/graphql/exec.go
+++ /dev/null
@@ -1,118 +0,0 @@
-package graphql
-
-import (
- "context"
- "fmt"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-type ExecutableSchema interface {
- Schema() *schema.Schema
-
- Query(ctx context.Context, op *query.Operation) *Response
- Mutation(ctx context.Context, op *query.Operation) *Response
- Subscription(ctx context.Context, op *query.Operation) func() *Response
-}
-
-func CollectFields(doc *query.Document, selSet []query.Selection, satisfies []string, variables map[string]interface{}) []CollectedField {
- return collectFields(doc, selSet, satisfies, variables, map[string]bool{})
-}
-
-func collectFields(doc *query.Document, selSet []query.Selection, satisfies []string, variables map[string]interface{}, visited map[string]bool) []CollectedField {
- var groupedFields []CollectedField
-
- for _, sel := range selSet {
- switch sel := sel.(type) {
- case *query.Field:
- f := getOrCreateField(&groupedFields, sel.Alias.Name, func() CollectedField {
- f := CollectedField{
- Alias: sel.Alias.Name,
- Name: sel.Name.Name,
- }
- if len(sel.Arguments) > 0 {
- f.Args = map[string]interface{}{}
- for _, arg := range sel.Arguments {
- if variable, ok := arg.Value.(*common.Variable); ok {
- if val, ok := variables[variable.Name]; ok {
- f.Args[arg.Name.Name] = val
- }
- } else {
- f.Args[arg.Name.Name] = arg.Value.Value(variables)
- }
- }
- }
- return f
- })
-
- f.Selections = append(f.Selections, sel.Selections...)
- case *query.InlineFragment:
- if !instanceOf(sel.On.Ident.Name, satisfies) {
- continue
- }
-
- for _, childField := range collectFields(doc, sel.Selections, satisfies, variables, visited) {
- f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
- f.Selections = append(f.Selections, childField.Selections...)
- }
-
- case *query.FragmentSpread:
- fragmentName := sel.Name.Name
- if _, seen := visited[fragmentName]; seen {
- continue
- }
- visited[fragmentName] = true
-
- fragment := doc.Fragments.Get(fragmentName)
- if fragment == nil {
- // should never happen, validator has already run
- panic(fmt.Errorf("missing fragment %s", fragmentName))
- }
-
- if !instanceOf(fragment.On.Ident.Name, satisfies) {
- continue
- }
-
- for _, childField := range collectFields(doc, fragment.Selections, satisfies, variables, visited) {
- f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
- f.Selections = append(f.Selections, childField.Selections...)
- }
-
- default:
- panic(fmt.Errorf("unsupported %T", sel))
- }
- }
-
- return groupedFields
-}
-
-type CollectedField struct {
- Alias string
- Name string
- Args map[string]interface{}
- Selections []query.Selection
-}
-
-func instanceOf(val string, satisfies []string) bool {
- for _, s := range satisfies {
- if val == s {
- return true
- }
- }
- return false
-}
-
-func getOrCreateField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField {
- for i, cf := range *c {
- if cf.Alias == name {
- return &(*c)[i]
- }
- }
-
- f := creator()
-
- *c = append(*c, f)
- return &(*c)[len(*c)-1]
-}
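For context on what is being dropped here: the removed CollectFields helper was driven together with the neelance query parser that is deleted further down in this diff. A minimal, illustrative sketch (the query text and the "Query" type name are made up for the example):

package main

import (
	"fmt"

	"github.com/vektah/gqlgen/graphql"
	"github.com/vektah/gqlgen/neelance/query"
)

func main() {
	// Parse a query document with the neelance parser removed later in this diff.
	doc, qerr := query.Parse(`{ me { name } }`)
	if qerr != nil {
		panic(qerr)
	}

	// Pick the only operation in the document.
	op, err := doc.GetOperation("")
	if err != nil {
		panic(err)
	}

	// Group the top-level selections for a type named "Query"; no variables are supplied.
	for _, f := range graphql.CollectFields(doc, op.Selections, []string{"Query"}, nil) {
		fmt.Printf("alias=%s field=%s\n", f.Alias, f.Name)
	}
}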
diff --git a/vendor/github.com/vektah/gqlgen/handler/stub.go b/vendor/github.com/vektah/gqlgen/handler/stub.go
deleted file mode 100644
index 46b27e46..00000000
--- a/vendor/github.com/vektah/gqlgen/handler/stub.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package handler
-
-import (
- "context"
- "time"
-
- "github.com/vektah/gqlgen/graphql"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-type executableSchemaStub struct {
-}
-
-var _ graphql.ExecutableSchema = &executableSchemaStub{}
-
-func (e *executableSchemaStub) Schema() *schema.Schema {
- return schema.MustParse(`
- schema { query: Query }
- type Query { me: User! }
- type User { name: String! }
- `)
-}
-
-func (e *executableSchemaStub) Query(ctx context.Context, op *query.Operation) *graphql.Response {
- return &graphql.Response{Data: []byte(`{"name":"test"}`)}
-}
-
-func (e *executableSchemaStub) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {
- return graphql.ErrorResponse(ctx, "mutations are not supported")
-}
-
-func (e *executableSchemaStub) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {
- return func() *graphql.Response {
- time.Sleep(50 * time.Millisecond)
- select {
- case <-ctx.Done():
- return nil
- default:
- return &graphql.Response{
- Data: []byte(`{"name":"test"}`),
- }
- }
- }
-}
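The stub above only exists so the handler package can be exercised against the graphql.ExecutableSchema interface. A hypothetical in-package test, relying only on the behaviour visible in this file, might have looked like:

package handler

import (
	"context"
	"testing"
)

// Hypothetical companion test for the stub above.
func TestExecutableSchemaStub(t *testing.T) {
	var stub executableSchemaStub

	// Query always returns the canned payload.
	if got := string(stub.Query(context.Background(), nil).Data); got != `{"name":"test"}` {
		t.Fatalf("unexpected query data: %s", got)
	}
	// Schema parses the tiny inline schema definition.
	if stub.Schema() == nil {
		t.Fatal("expected the stub to expose a parsed schema")
	}
}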
diff --git a/vendor/github.com/vektah/gqlgen/neelance/LICENSE b/vendor/github.com/vektah/gqlgen/neelance/LICENSE
deleted file mode 100644
index 3907ceca..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright (c) 2016 Richard Musiol. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/directive.go b/vendor/github.com/vektah/gqlgen/neelance/common/directive.go
deleted file mode 100644
index 62dca47f..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/directive.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package common
-
-type Directive struct {
- Name Ident
- Args ArgumentList
-}
-
-func ParseDirectives(l *Lexer) DirectiveList {
- var directives DirectiveList
- for l.Peek() == '@' {
- l.ConsumeToken('@')
- d := &Directive{}
- d.Name = l.ConsumeIdentWithLoc()
- d.Name.Loc.Column--
- if l.Peek() == '(' {
- d.Args = ParseArguments(l)
- }
- directives = append(directives, d)
- }
- return directives
-}
-
-type DirectiveList []*Directive
-
-func (l DirectiveList) Get(name string) *Directive {
- for _, d := range l {
- if d.Name.Name == name {
- return d
- }
- }
- return nil
-}
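A short sketch of how the removed ParseDirectives is fed from the lexer defined next in this diff; the `@include(if: true)` input is just an example:

package main

import (
	"fmt"
	"strings"
	"text/scanner"

	"github.com/vektah/gqlgen/neelance/common"
)

func main() {
	// Set the scanner up the same way the query and schema parsers below do.
	sc := &scanner.Scanner{
		Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
	}
	sc.Init(strings.NewReader(`@include(if: true)`))

	l := common.New(sc)
	var directives common.DirectiveList
	if err := l.CatchSyntaxError(func() { directives = common.ParseDirectives(l) }); err != nil {
		panic(err)
	}

	// Look the directive up by name and read its "if" argument.
	if d := directives.Get("include"); d != nil {
		if lit, ok := d.Args.Get("if"); ok {
			fmt.Println("include if:", lit.Value(nil)) // include if: true
		}
	}
}

Get returns nil when a directive is absent, which is how callers such as the introspection resolvers below test for @deprecated.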
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/lexer.go b/vendor/github.com/vektah/gqlgen/neelance/common/lexer.go
deleted file mode 100644
index fdc1e622..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/lexer.go
+++ /dev/null
@@ -1,122 +0,0 @@
-package common
-
-import (
- "fmt"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type syntaxError string
-
-type Lexer struct {
- sc *scanner.Scanner
- next rune
- descComment string
-}
-
-type Ident struct {
- Name string
- Loc errors.Location
-}
-
-func New(sc *scanner.Scanner) *Lexer {
- l := &Lexer{sc: sc}
- l.Consume()
- return l
-}
-
-func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) {
- defer func() {
- if err := recover(); err != nil {
- if err, ok := err.(syntaxError); ok {
- errRes = errors.Errorf("syntax error: %s", err)
- errRes.Locations = []errors.Location{l.Location()}
- return
- }
- panic(err)
- }
- }()
-
- f()
- return
-}
-
-func (l *Lexer) Peek() rune {
- return l.next
-}
-
-func (l *Lexer) Consume() {
- l.descComment = ""
- for {
- l.next = l.sc.Scan()
- if l.next == ',' {
- continue
- }
- if l.next == '#' {
- if l.sc.Peek() == ' ' {
- l.sc.Next()
- }
- if l.descComment != "" {
- l.descComment += "\n"
- }
- for {
- next := l.sc.Next()
- if next == '\n' || next == scanner.EOF {
- break
- }
- l.descComment += string(next)
- }
- continue
- }
- break
- }
-}
-
-func (l *Lexer) ConsumeIdent() string {
- name := l.sc.TokenText()
- l.ConsumeToken(scanner.Ident)
- return name
-}
-
-func (l *Lexer) ConsumeIdentWithLoc() Ident {
- loc := l.Location()
- name := l.sc.TokenText()
- l.ConsumeToken(scanner.Ident)
- return Ident{name, loc}
-}
-
-func (l *Lexer) ConsumeKeyword(keyword string) {
- if l.next != scanner.Ident || l.sc.TokenText() != keyword {
- l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword))
- }
- l.Consume()
-}
-
-func (l *Lexer) ConsumeLiteral() *BasicLit {
- lit := &BasicLit{Type: l.next, Text: l.sc.TokenText()}
- l.Consume()
- return lit
-}
-
-func (l *Lexer) ConsumeToken(expected rune) {
- if l.next != expected {
- l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected)))
- }
- l.Consume()
-}
-
-func (l *Lexer) DescComment() string {
- return l.descComment
-}
-
-func (l *Lexer) SyntaxError(message string) {
- panic(syntaxError(message))
-}
-
-func (l *Lexer) Location() errors.Location {
- return errors.Location{
- Line: l.sc.Line,
- Column: l.sc.Column,
- }
-}
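For reference, the lexer above can also be driven by hand. A sketch under the assumption of a well-formed snippet (the `# the answer` field is invented for illustration):

package main

import (
	"fmt"
	"strings"
	"text/scanner"

	"github.com/vektah/gqlgen/neelance/common"
)

func main() {
	sc := &scanner.Scanner{
		Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
	}
	sc.Init(strings.NewReader("# the answer\nanswer: Int"))

	l := common.New(sc)
	err := l.CatchSyntaxError(func() {
		desc := l.DescComment()  // "the answer", captured from the leading # comment
		name := l.ConsumeIdent() // "answer"
		l.ConsumeToken(':')
		typ := l.ConsumeIdent() // "Int"
		fmt.Printf("%s %s: %s\n", desc, name, typ)
	})
	if err != nil {
		panic(err)
	}
}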
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/literals.go b/vendor/github.com/vektah/gqlgen/neelance/common/literals.go
deleted file mode 100644
index 55619ba0..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/literals.go
+++ /dev/null
@@ -1,206 +0,0 @@
-package common
-
-import (
- "strconv"
- "strings"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type Literal interface {
- Value(vars map[string]interface{}) interface{}
- String() string
- Location() errors.Location
-}
-
-type BasicLit struct {
- Type rune
- Text string
- Loc errors.Location
-}
-
-func (lit *BasicLit) Value(vars map[string]interface{}) interface{} {
- switch lit.Type {
- case scanner.Int:
- value, err := strconv.ParseInt(lit.Text, 10, 64)
- if err != nil {
- panic(err)
- }
- return int(value)
-
- case scanner.Float:
- value, err := strconv.ParseFloat(lit.Text, 64)
- if err != nil {
- panic(err)
- }
- return value
-
- case scanner.String:
- value, err := strconv.Unquote(lit.Text)
- if err != nil {
- panic(err)
- }
- return value
-
- case scanner.Ident:
- switch lit.Text {
- case "true":
- return true
- case "false":
- return false
- default:
- return lit.Text
- }
-
- default:
- panic("invalid literal")
- }
-}
-
-func (lit *BasicLit) String() string {
- return lit.Text
-}
-
-func (lit *BasicLit) Location() errors.Location {
- return lit.Loc
-}
-
-type ListLit struct {
- Entries []Literal
- Loc errors.Location
-}
-
-func (lit *ListLit) Value(vars map[string]interface{}) interface{} {
- entries := make([]interface{}, len(lit.Entries))
- for i, entry := range lit.Entries {
- entries[i] = entry.Value(vars)
- }
- return entries
-}
-
-func (lit *ListLit) String() string {
- entries := make([]string, len(lit.Entries))
- for i, entry := range lit.Entries {
- entries[i] = entry.String()
- }
- return "[" + strings.Join(entries, ", ") + "]"
-}
-
-func (lit *ListLit) Location() errors.Location {
- return lit.Loc
-}
-
-type ObjectLit struct {
- Fields []*ObjectLitField
- Loc errors.Location
-}
-
-type ObjectLitField struct {
- Name Ident
- Value Literal
-}
-
-func (lit *ObjectLit) Value(vars map[string]interface{}) interface{} {
- fields := make(map[string]interface{}, len(lit.Fields))
- for _, f := range lit.Fields {
- fields[f.Name.Name] = f.Value.Value(vars)
- }
- return fields
-}
-
-func (lit *ObjectLit) String() string {
- entries := make([]string, 0, len(lit.Fields))
- for _, f := range lit.Fields {
- entries = append(entries, f.Name.Name+": "+f.Value.String())
- }
- return "{" + strings.Join(entries, ", ") + "}"
-}
-
-func (lit *ObjectLit) Location() errors.Location {
- return lit.Loc
-}
-
-type NullLit struct {
- Loc errors.Location
-}
-
-func (lit *NullLit) Value(vars map[string]interface{}) interface{} {
- return nil
-}
-
-func (lit *NullLit) String() string {
- return "null"
-}
-
-func (lit *NullLit) Location() errors.Location {
- return lit.Loc
-}
-
-type Variable struct {
- Name string
- Loc errors.Location
-}
-
-func (v Variable) Value(vars map[string]interface{}) interface{} {
- return vars[v.Name]
-}
-
-func (v Variable) String() string {
- return "$" + v.Name
-}
-
-func (v *Variable) Location() errors.Location {
- return v.Loc
-}
-
-func ParseLiteral(l *Lexer, constOnly bool) Literal {
- loc := l.Location()
- switch l.Peek() {
- case '$':
- if constOnly {
- l.SyntaxError("variable not allowed")
- panic("unreachable")
- }
- l.ConsumeToken('$')
- return &Variable{l.ConsumeIdent(), loc}
-
- case scanner.Int, scanner.Float, scanner.String, scanner.Ident:
- lit := l.ConsumeLiteral()
- if lit.Type == scanner.Ident && lit.Text == "null" {
- return &NullLit{loc}
- }
- lit.Loc = loc
- return lit
- case '-':
- l.ConsumeToken('-')
- lit := l.ConsumeLiteral()
- lit.Text = "-" + lit.Text
- lit.Loc = loc
- return lit
- case '[':
- l.ConsumeToken('[')
- var list []Literal
- for l.Peek() != ']' {
- list = append(list, ParseLiteral(l, constOnly))
- }
- l.ConsumeToken(']')
- return &ListLit{list, loc}
-
- case '{':
- l.ConsumeToken('{')
- var fields []*ObjectLitField
- for l.Peek() != '}' {
- name := l.ConsumeIdentWithLoc()
- l.ConsumeToken(':')
- value := ParseLiteral(l, constOnly)
- fields = append(fields, &ObjectLitField{name, value})
- }
- l.ConsumeToken('}')
- return &ObjectLit{fields, loc}
-
- default:
- l.SyntaxError("invalid value")
- panic("unreachable")
- }
-}
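The literal parser above pairs with the value accessors it defines. An illustrative sketch parsing a made-up list literal:

package main

import (
	"fmt"
	"strings"
	"text/scanner"

	"github.com/vektah/gqlgen/neelance/common"
)

func main() {
	sc := &scanner.Scanner{
		Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
	}
	sc.Init(strings.NewReader(`[1, 2.5, "x", null]`))

	l := common.New(sc)
	var lit common.Literal
	err := l.CatchSyntaxError(func() {
		// constOnly=false would also allow $variables inside the literal.
		lit = common.ParseLiteral(l, false)
	})
	if err != nil {
		panic(err)
	}

	fmt.Println(lit.String())   // [1, 2.5, "x", null]
	fmt.Println(lit.Value(nil)) // [1 2.5 x <nil>]
}

Note that the lexer silently skips commas, which is why ParseLiteral never has to consume separators itself.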
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/types.go b/vendor/github.com/vektah/gqlgen/neelance/common/types.go
deleted file mode 100644
index 0bbf24ef..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/types.go
+++ /dev/null
@@ -1,80 +0,0 @@
-package common
-
-import (
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type Type interface {
- Kind() string
- String() string
-}
-
-type List struct {
- OfType Type
-}
-
-type NonNull struct {
- OfType Type
-}
-
-type TypeName struct {
- Ident
-}
-
-func (*List) Kind() string { return "LIST" }
-func (*NonNull) Kind() string { return "NON_NULL" }
-func (*TypeName) Kind() string { panic("TypeName needs to be resolved to actual type") }
-
-func (t *List) String() string { return "[" + t.OfType.String() + "]" }
-func (t *NonNull) String() string { return t.OfType.String() + "!" }
-func (*TypeName) String() string { panic("TypeName needs to be resolved to actual type") }
-
-func ParseType(l *Lexer) Type {
- t := parseNullType(l)
- if l.Peek() == '!' {
- l.ConsumeToken('!')
- return &NonNull{OfType: t}
- }
- return t
-}
-
-func parseNullType(l *Lexer) Type {
- if l.Peek() == '[' {
- l.ConsumeToken('[')
- ofType := ParseType(l)
- l.ConsumeToken(']')
- return &List{OfType: ofType}
- }
-
- return &TypeName{Ident: l.ConsumeIdentWithLoc()}
-}
-
-type Resolver func(name string) Type
-
-func ResolveType(t Type, resolver Resolver) (Type, *errors.QueryError) {
- switch t := t.(type) {
- case *List:
- ofType, err := ResolveType(t.OfType, resolver)
- if err != nil {
- return nil, err
- }
- return &List{OfType: ofType}, nil
- case *NonNull:
- ofType, err := ResolveType(t.OfType, resolver)
- if err != nil {
- return nil, err
- }
- return &NonNull{OfType: ofType}, nil
- case *TypeName:
- refT := resolver(t.Name)
- if refT == nil {
- err := errors.Errorf("Unknown type %q.", t.Name)
- err.Rule = "KnownTypeNames"
- err.Locations = []errors.Location{t.Loc}
- return nil, err
- }
- return refT, nil
- default:
- return t, nil
- }
-}
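ParseType only produces placeholder TypeName nodes; ResolveType swaps them for concrete named types. A sketch using a stand-in scalar as the resolver result (the `Episode` name is hypothetical):

package main

import (
	"fmt"
	"strings"
	"text/scanner"

	"github.com/vektah/gqlgen/neelance/common"
	"github.com/vektah/gqlgen/neelance/schema"
)

func main() {
	sc := &scanner.Scanner{
		Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
	}
	sc.Init(strings.NewReader(`[Episode!]!`))

	l := common.New(sc)
	var t common.Type
	if err := l.CatchSyntaxError(func() { t = common.ParseType(l) }); err != nil {
		panic(err)
	}

	// Replace the unresolved TypeName with a concrete named type.
	resolved, rerr := common.ResolveType(t, func(name string) common.Type {
		return &schema.Scalar{Name: name} // stand-in for a real schema lookup
	})
	if rerr != nil {
		panic(rerr)
	}
	fmt.Println(resolved.Kind(), resolved.String()) // NON_NULL [Episode!]!
}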
diff --git a/vendor/github.com/vektah/gqlgen/neelance/common/values.go b/vendor/github.com/vektah/gqlgen/neelance/common/values.go
deleted file mode 100644
index 09338da8..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/common/values.go
+++ /dev/null
@@ -1,77 +0,0 @@
-package common
-
-import (
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type InputValue struct {
- Name Ident
- Type Type
- Default Literal
- Desc string
- Loc errors.Location
- TypeLoc errors.Location
-}
-
-type InputValueList []*InputValue
-
-func (l InputValueList) Get(name string) *InputValue {
- for _, v := range l {
- if v.Name.Name == name {
- return v
- }
- }
- return nil
-}
-
-func ParseInputValue(l *Lexer) *InputValue {
- p := &InputValue{}
- p.Loc = l.Location()
- p.Desc = l.DescComment()
- p.Name = l.ConsumeIdentWithLoc()
- l.ConsumeToken(':')
- p.TypeLoc = l.Location()
- p.Type = ParseType(l)
- if l.Peek() == '=' {
- l.ConsumeToken('=')
- p.Default = ParseLiteral(l, true)
- }
- return p
-}
-
-type Argument struct {
- Name Ident
- Value Literal
-}
-
-type ArgumentList []Argument
-
-func (l ArgumentList) Get(name string) (Literal, bool) {
- for _, arg := range l {
- if arg.Name.Name == name {
- return arg.Value, true
- }
- }
- return nil, false
-}
-
-func (l ArgumentList) MustGet(name string) Literal {
- value, ok := l.Get(name)
- if !ok {
- panic("argument not found")
- }
- return value
-}
-
-func ParseArguments(l *Lexer) ArgumentList {
- var args ArgumentList
- l.ConsumeToken('(')
- for l.Peek() != ')' {
- name := l.ConsumeIdentWithLoc()
- l.ConsumeToken(':')
- value := ParseLiteral(l, false)
- args = append(args, Argument{Name: name, Value: value})
- }
- l.ConsumeToken(')')
- return args
-}
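The argument helpers above are exercised the same way. A small sketch over an invented argument list:

package main

import (
	"fmt"
	"strings"
	"text/scanner"

	"github.com/vektah/gqlgen/neelance/common"
)

func main() {
	sc := &scanner.Scanner{
		Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
	}
	sc.Init(strings.NewReader(`(first: 10, after: "abc")`))

	l := common.New(sc)
	var args common.ArgumentList
	if err := l.CatchSyntaxError(func() { args = common.ParseArguments(l) }); err != nil {
		panic(err)
	}

	fmt.Println(args.MustGet("first").Value(nil)) // 10
	if after, ok := args.Get("after"); ok {
		fmt.Println(after.Value(nil)) // abc
	}
}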
diff --git a/vendor/github.com/vektah/gqlgen/neelance/errors/errors.go b/vendor/github.com/vektah/gqlgen/neelance/errors/errors.go
deleted file mode 100644
index fdfa6202..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/errors/errors.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package errors
-
-import (
- "fmt"
-)
-
-type QueryError struct {
- Message string `json:"message"`
- Locations []Location `json:"locations,omitempty"`
- Path []interface{} `json:"path,omitempty"`
- Rule string `json:"-"`
- ResolverError error `json:"-"`
-}
-
-type Location struct {
- Line int `json:"line"`
- Column int `json:"column"`
-}
-
-func (a Location) Before(b Location) bool {
- return a.Line < b.Line || (a.Line == b.Line && a.Column < b.Column)
-}
-
-func Errorf(format string, a ...interface{}) *QueryError {
- return &QueryError{
- Message: fmt.Sprintf(format, a...),
- }
-}
-
-func (err *QueryError) Error() string {
- if err == nil {
- return "<nil>"
- }
- str := fmt.Sprintf("graphql: %s", err.Message)
- for _, loc := range err.Locations {
- str += fmt.Sprintf(" (line %d, column %d)", loc.Line, loc.Column)
- }
- return str
-}
-
-var _ error = &QueryError{}
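QueryError carries its own locations and renders them in Error(). A tiny illustration (the message and location are made up):

package main

import (
	"fmt"

	"github.com/vektah/gqlgen/neelance/errors"
)

func main() {
	qerr := errors.Errorf("cannot query field %q", "nmae")
	qerr.Locations = []errors.Location{{Line: 3, Column: 7}}
	qerr.Rule = "FieldsOnCorrectType"

	// Error() renders the message followed by each attached location.
	fmt.Println(qerr.Error()) // graphql: cannot query field "nmae" (line 3, column 7)
}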
diff --git a/vendor/github.com/vektah/gqlgen/neelance/introspection/introspection.go b/vendor/github.com/vektah/gqlgen/neelance/introspection/introspection.go
deleted file mode 100644
index 5e354c9a..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/introspection/introspection.go
+++ /dev/null
@@ -1,313 +0,0 @@
-package introspection
-
-import (
- "sort"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-type Schema struct {
- schema *schema.Schema
-}
-
-// WrapSchema is only used internally.
-func WrapSchema(schema *schema.Schema) *Schema {
- return &Schema{schema}
-}
-
-func (r *Schema) Types() []Type {
- var names []string
- for name := range r.schema.Types {
- names = append(names, name)
- }
- sort.Strings(names)
-
- l := make([]Type, len(names))
- for i, name := range names {
- l[i] = Type{r.schema.Types[name]}
- }
- return l
-}
-
-func (r *Schema) Directives() []Directive {
- var names []string
- for name := range r.schema.Directives {
- names = append(names, name)
- }
- sort.Strings(names)
-
- l := make([]Directive, len(names))
- for i, name := range names {
- l[i] = Directive{r.schema.Directives[name]}
- }
- return l
-}
-
-func (r *Schema) QueryType() Type {
- t, ok := r.schema.EntryPoints["query"]
- if !ok {
- return Type{}
- }
- return Type{t}
-}
-
-func (r *Schema) MutationType() *Type {
- t, ok := r.schema.EntryPoints["mutation"]
- if !ok {
- return nil
- }
- return &Type{t}
-}
-
-func (r *Schema) SubscriptionType() *Type {
- t, ok := r.schema.EntryPoints["subscription"]
- if !ok {
- return nil
- }
- return &Type{t}
-}
-
-type Type struct {
- typ common.Type
-}
-
-// WrapType is only used internally.
-func WrapType(typ common.Type) *Type {
- return &Type{typ}
-}
-
-func (r *Type) Kind() string {
- return r.typ.Kind()
-}
-
-func (r *Type) Name() *string {
- if named, ok := r.typ.(schema.NamedType); ok {
- name := named.TypeName()
- return &name
- }
- return nil
-}
-
-func (r *Type) Description() *string {
- if named, ok := r.typ.(schema.NamedType); ok {
- desc := named.Description()
- if desc == "" {
- return nil
- }
- return &desc
- }
- return nil
-}
-
-func (r *Type) Fields(includeDeprecated bool) []Field {
- var fields schema.FieldList
- switch t := r.typ.(type) {
- case *schema.Object:
- fields = t.Fields
- case *schema.Interface:
- fields = t.Fields
- default:
- return nil
- }
-
- var l []Field
- for _, f := range fields {
- if d := f.Directives.Get("deprecated"); d == nil || includeDeprecated {
- l = append(l, Field{f})
- }
- }
- return l
-}
-
-func (r *Type) Interfaces() []Type {
- t, ok := r.typ.(*schema.Object)
- if !ok {
- return nil
- }
-
- l := make([]Type, len(t.Interfaces))
- for i, intf := range t.Interfaces {
- l[i] = Type{intf}
- }
- return l
-}
-
-func (r *Type) PossibleTypes() []Type {
- var possibleTypes []*schema.Object
- switch t := r.typ.(type) {
- case *schema.Interface:
- possibleTypes = t.PossibleTypes
- case *schema.Union:
- possibleTypes = t.PossibleTypes
- default:
- return nil
- }
-
- l := make([]Type, len(possibleTypes))
- for i, intf := range possibleTypes {
- l[i] = Type{intf}
- }
- return l
-}
-
-func (r *Type) EnumValues(includeDeprecated bool) []EnumValue {
- t, ok := r.typ.(*schema.Enum)
- if !ok {
- return nil
- }
-
- var l []EnumValue
- for _, v := range t.Values {
- if d := v.Directives.Get("deprecated"); d == nil || includeDeprecated {
- l = append(l, EnumValue{v})
- }
- }
- return l
-}
-
-func (r *Type) InputFields() []InputValue {
- t, ok := r.typ.(*schema.InputObject)
- if !ok {
- return nil
- }
-
- l := make([]InputValue, len(t.Values))
- for i, v := range t.Values {
- l[i] = InputValue{v}
- }
- return l
-}
-
-func (r *Type) OfType() *Type {
- switch t := r.typ.(type) {
- case *common.List:
- return &Type{t.OfType}
- case *common.NonNull:
- return &Type{t.OfType}
- default:
- return nil
- }
-}
-
-type Field struct {
- field *schema.Field
-}
-
-func (r *Field) Name() string {
- return r.field.Name
-}
-
-func (r *Field) Description() *string {
- if r.field.Desc == "" {
- return nil
- }
- return &r.field.Desc
-}
-
-func (r *Field) Args() []InputValue {
- l := make([]InputValue, len(r.field.Args))
- for i, v := range r.field.Args {
- l[i] = InputValue{v}
- }
- return l
-}
-
-func (r *Field) Type() Type {
- return Type{r.field.Type}
-}
-
-func (r *Field) IsDeprecated() bool {
- return r.field.Directives.Get("deprecated") != nil
-}
-
-func (r *Field) DeprecationReason() *string {
- d := r.field.Directives.Get("deprecated")
- if d == nil {
- return nil
- }
- reason := d.Args.MustGet("reason").Value(nil).(string)
- return &reason
-}
-
-type InputValue struct {
- value *common.InputValue
-}
-
-func (r *InputValue) Name() string {
- return r.value.Name.Name
-}
-
-func (r *InputValue) Description() *string {
- if r.value.Desc == "" {
- return nil
- }
- return &r.value.Desc
-}
-
-func (r *InputValue) Type() Type {
- return Type{r.value.Type}
-}
-
-func (r *InputValue) DefaultValue() *string {
- if r.value.Default == nil {
- return nil
- }
- s := r.value.Default.String()
- return &s
-}
-
-type EnumValue struct {
- value *schema.EnumValue
-}
-
-func (r *EnumValue) Name() string {
- return r.value.Name
-}
-
-func (r *EnumValue) Description() *string {
- if r.value.Desc == "" {
- return nil
- }
- return &r.value.Desc
-}
-
-func (r *EnumValue) IsDeprecated() bool {
- return r.value.Directives.Get("deprecated") != nil
-}
-
-func (r *EnumValue) DeprecationReason() *string {
- d := r.value.Directives.Get("deprecated")
- if d == nil {
- return nil
- }
- reason := d.Args.MustGet("reason").Value(nil).(string)
- return &reason
-}
-
-type Directive struct {
- directive *schema.DirectiveDecl
-}
-
-func (r *Directive) Name() string {
- return r.directive.Name
-}
-
-func (r *Directive) Description() *string {
- if r.directive.Desc == "" {
- return nil
- }
- return &r.directive.Desc
-}
-
-func (r *Directive) Locations() []string {
- return r.directive.Locs
-}
-
-func (r *Directive) Args() []InputValue {
- l := make([]InputValue, len(r.directive.Args))
- for i, v := range r.directive.Args {
- l[i] = InputValue{v}
- }
- return l
-}
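The introspection wrappers above sit directly on a parsed schema. A sketch reusing the same toy schema as the handler stub earlier in this diff:

package main

import (
	"fmt"

	"github.com/vektah/gqlgen/neelance/introspection"
	"github.com/vektah/gqlgen/neelance/schema"
)

func main() {
	s := schema.MustParse(`
		schema { query: Query }
		type Query { me: User! }
		type User { name: String! }
	`)

	// Wrap the parsed schema with the introspection resolvers removed here.
	wrapped := introspection.WrapSchema(s)

	qt := wrapped.QueryType()
	fmt.Println("query type:", *qt.Name())
	for _, f := range qt.Fields(false) {
		ft := f.Type()
		fmt.Println("field:", f.Name(), "->", ft.Kind())
	}
}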
diff --git a/vendor/github.com/vektah/gqlgen/neelance/query/query.go b/vendor/github.com/vektah/gqlgen/neelance/query/query.go
deleted file mode 100644
index b6f35354..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/query/query.go
+++ /dev/null
@@ -1,261 +0,0 @@
-package query
-
-import (
- "fmt"
- "strings"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type Document struct {
- Operations OperationList
- Fragments FragmentList
-}
-
-type OperationList []*Operation
-
-func (l OperationList) Get(name string) *Operation {
- for _, f := range l {
- if f.Name.Name == name {
- return f
- }
- }
- return nil
-}
-
-type FragmentList []*FragmentDecl
-
-func (l FragmentList) Get(name string) *FragmentDecl {
- for _, f := range l {
- if f.Name.Name == name {
- return f
- }
- }
- return nil
-}
-
-type Operation struct {
- Type OperationType
- Name common.Ident
- Vars common.InputValueList
- Selections []Selection
- Directives common.DirectiveList
- Loc errors.Location
-}
-
-type OperationType string
-
-const (
-	Query        OperationType = "QUERY"
-	Mutation     OperationType = "MUTATION"
-	Subscription OperationType = "SUBSCRIPTION"
-)
-
-type Fragment struct {
- On common.TypeName
- Selections []Selection
-}
-
-type FragmentDecl struct {
- Fragment
- Name common.Ident
- Directives common.DirectiveList
- Loc errors.Location
-}
-
-type Selection interface {
- isSelection()
-}
-
-type Field struct {
- Alias common.Ident
- Name common.Ident
- Arguments common.ArgumentList
- Directives common.DirectiveList
- Selections []Selection
- SelectionSetLoc errors.Location
-}
-
-type InlineFragment struct {
- Fragment
- Directives common.DirectiveList
- Loc errors.Location
-}
-
-type FragmentSpread struct {
- Name common.Ident
- Directives common.DirectiveList
- Loc errors.Location
-}
-
-func (Field) isSelection() {}
-func (InlineFragment) isSelection() {}
-func (FragmentSpread) isSelection() {}
-
-func Parse(queryString string) (*Document, *errors.QueryError) {
- sc := &scanner.Scanner{
- Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
- }
- sc.Init(strings.NewReader(queryString))
-
- l := common.New(sc)
- var doc *Document
- err := l.CatchSyntaxError(func() {
- doc = parseDocument(l)
- })
- if err != nil {
- return nil, err
- }
-
- return doc, nil
-}
-
-func parseDocument(l *common.Lexer) *Document {
- d := &Document{}
- for l.Peek() != scanner.EOF {
- if l.Peek() == '{' {
- op := &Operation{Type: Query, Loc: l.Location()}
- op.Selections = parseSelectionSet(l)
- d.Operations = append(d.Operations, op)
- continue
- }
-
- loc := l.Location()
- switch x := l.ConsumeIdent(); x {
- case "query":
- op := parseOperation(l, Query)
- op.Loc = loc
- d.Operations = append(d.Operations, op)
-
- case "mutation":
- d.Operations = append(d.Operations, parseOperation(l, Mutation))
-
- case "subscription":
- d.Operations = append(d.Operations, parseOperation(l, Subscription))
-
- case "fragment":
- frag := parseFragment(l)
- frag.Loc = loc
- d.Fragments = append(d.Fragments, frag)
-
- default:
- l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "fragment"`, x))
- }
- }
- return d
-}
-
-func parseOperation(l *common.Lexer, opType OperationType) *Operation {
- op := &Operation{Type: opType}
- op.Name.Loc = l.Location()
- if l.Peek() == scanner.Ident {
- op.Name = l.ConsumeIdentWithLoc()
- }
- op.Directives = common.ParseDirectives(l)
- if l.Peek() == '(' {
- l.ConsumeToken('(')
- for l.Peek() != ')' {
- loc := l.Location()
- l.ConsumeToken('$')
- iv := common.ParseInputValue(l)
- iv.Loc = loc
- op.Vars = append(op.Vars, iv)
- }
- l.ConsumeToken(')')
- }
- op.Selections = parseSelectionSet(l)
- return op
-}
-
-func parseFragment(l *common.Lexer) *FragmentDecl {
- f := &FragmentDecl{}
- f.Name = l.ConsumeIdentWithLoc()
- l.ConsumeKeyword("on")
- f.On = common.TypeName{Ident: l.ConsumeIdentWithLoc()}
- f.Directives = common.ParseDirectives(l)
- f.Selections = parseSelectionSet(l)
- return f
-}
-
-func parseSelectionSet(l *common.Lexer) []Selection {
- var sels []Selection
- l.ConsumeToken('{')
- for l.Peek() != '}' {
- sels = append(sels, parseSelection(l))
- }
- l.ConsumeToken('}')
- return sels
-}
-
-func parseSelection(l *common.Lexer) Selection {
- if l.Peek() == '.' {
- return parseSpread(l)
- }
- return parseField(l)
-}
-
-func parseField(l *common.Lexer) *Field {
- f := &Field{}
- f.Alias = l.ConsumeIdentWithLoc()
- f.Name = f.Alias
- if l.Peek() == ':' {
- l.ConsumeToken(':')
- f.Name = l.ConsumeIdentWithLoc()
- }
- if l.Peek() == '(' {
- f.Arguments = common.ParseArguments(l)
- }
- f.Directives = common.ParseDirectives(l)
- if l.Peek() == '{' {
- f.SelectionSetLoc = l.Location()
- f.Selections = parseSelectionSet(l)
- }
- return f
-}
-
-func parseSpread(l *common.Lexer) Selection {
- loc := l.Location()
- l.ConsumeToken('.')
- l.ConsumeToken('.')
- l.ConsumeToken('.')
-
- f := &InlineFragment{Loc: loc}
- if l.Peek() == scanner.Ident {
- ident := l.ConsumeIdentWithLoc()
- if ident.Name != "on" {
- fs := &FragmentSpread{
- Name: ident,
- Loc: loc,
- }
- fs.Directives = common.ParseDirectives(l)
- return fs
- }
- f.On = common.TypeName{Ident: l.ConsumeIdentWithLoc()}
- }
- f.Directives = common.ParseDirectives(l)
- f.Selections = parseSelectionSet(l)
- return f
-}
-
-func (d *Document) GetOperation(operationName string) (*Operation, error) {
- if len(d.Operations) == 0 {
- return nil, fmt.Errorf("no operations in query document")
- }
-
- if operationName == "" {
- if len(d.Operations) > 1 {
- return nil, fmt.Errorf("more than one operation in query document and no operation name given")
- }
- for _, op := range d.Operations {
- return op, nil // return the one and only operation
- }
- }
-
- op := d.Operations.Get(operationName)
- if op == nil {
- return nil, fmt.Errorf("no operation with name %q", operationName)
- }
- return op, nil
-}
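A sketch of the parser above on a document with a named operation and a fragment (the GetUser query is illustrative):

package main

import (
	"fmt"

	"github.com/vektah/gqlgen/neelance/query"
)

func main() {
	doc, qerr := query.Parse(`
		query GetUser($id: ID!) {
			user(id: $id) { ...basic }
		}
		fragment basic on User { name }
	`)
	if qerr != nil {
		panic(qerr)
	}

	op := doc.Operations.Get("GetUser")
	fmt.Println("operation:", op.Name.Name, "type:", op.Type)
	for _, v := range op.Vars {
		fmt.Println("variable: $" + v.Name.Name)
	}

	frag := doc.Fragments.Get("basic")
	fmt.Println("fragment:", frag.Name.Name, "on", frag.On.Name)
}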
diff --git a/vendor/github.com/vektah/gqlgen/neelance/schema/meta.go b/vendor/github.com/vektah/gqlgen/neelance/schema/meta.go
deleted file mode 100644
index efdcaa2c..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/schema/meta.go
+++ /dev/null
@@ -1,193 +0,0 @@
-package schema
-
-var Meta *Schema
-
-func init() {
- Meta = &Schema{} // bootstrap
- Meta = New()
- if err := Meta.Parse(metaSrc); err != nil {
- panic(err)
- }
-}
-
-var metaSrc = `
- # The ` + "`" + `Int` + "`" + ` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
- scalar Int
-
- # The ` + "`" + `Float` + "`" + ` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
- scalar Float
-
- # The ` + "`" + `String` + "`" + ` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
- scalar String
-
- # The ` + "`" + `Boolean` + "`" + ` scalar type represents ` + "`" + `true` + "`" + ` or ` + "`" + `false` + "`" + `.
- scalar Boolean
-
- # The ` + "`" + `ID` + "`" + ` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as ` + "`" + `"4"` + "`" + `) or integer (such as ` + "`" + `4` + "`" + `) input value will be accepted as an ID.
- scalar ID
-
- # The ` + "`" + `Map` + "`" + ` scalar type is a simple json object
- scalar Map
-
- # Directs the executor to include this field or fragment only when the ` + "`" + `if` + "`" + ` argument is true.
- directive @include(
- # Included when true.
- if: Boolean!
- ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
-
- # Directs the executor to skip this field or fragment when the ` + "`" + `if` + "`" + ` argument is true.
- directive @skip(
- # Skipped when true.
- if: Boolean!
- ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
-
- # Marks an element of a GraphQL schema as no longer supported.
- directive @deprecated(
- # Explains why this element was deprecated, usually also including a suggestion
- # for how to access supported similar data. Formatted in
- # [Markdown](https://daringfireball.net/projects/markdown/).
- reason: String = "No longer supported"
- ) on FIELD_DEFINITION | ENUM_VALUE
-
- # A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.
- #
- # In some cases, you need to provide options to alter GraphQL's execution behavior
- # in ways field arguments will not suffice, such as conditionally including or
- # skipping a field. Directives provide this by describing additional information
- # to the executor.
- type __Directive {
- name: String!
- description: String
- locations: [__DirectiveLocation!]!
- args: [__InputValue!]!
- }
-
-	# A Directive can be adjacent to many parts of the GraphQL language; a
-	# __DirectiveLocation describes one such possible adjacency.
- enum __DirectiveLocation {
- # Location adjacent to a query operation.
- QUERY
- # Location adjacent to a mutation operation.
- MUTATION
- # Location adjacent to a subscription operation.
- SUBSCRIPTION
- # Location adjacent to a field.
- FIELD
- # Location adjacent to a fragment definition.
- FRAGMENT_DEFINITION
- # Location adjacent to a fragment spread.
- FRAGMENT_SPREAD
- # Location adjacent to an inline fragment.
- INLINE_FRAGMENT
- # Location adjacent to a schema definition.
- SCHEMA
- # Location adjacent to a scalar definition.
- SCALAR
- # Location adjacent to an object type definition.
- OBJECT
- # Location adjacent to a field definition.
- FIELD_DEFINITION
- # Location adjacent to an argument definition.
- ARGUMENT_DEFINITION
- # Location adjacent to an interface definition.
- INTERFACE
- # Location adjacent to a union definition.
- UNION
- # Location adjacent to an enum definition.
- ENUM
- # Location adjacent to an enum value definition.
- ENUM_VALUE
- # Location adjacent to an input object type definition.
- INPUT_OBJECT
- # Location adjacent to an input object field definition.
- INPUT_FIELD_DEFINITION
- }
-
- # One possible value for a given Enum. Enum values are unique values, not a
- # placeholder for a string or numeric value. However an Enum value is returned in
- # a JSON response as a string.
- type __EnumValue {
- name: String!
- description: String
- isDeprecated: Boolean!
- deprecationReason: String
- }
-
- # Object and Interface types are described by a list of Fields, each of which has
- # a name, potentially a list of arguments, and a return type.
- type __Field {
- name: String!
- description: String
- args: [__InputValue!]!
- type: __Type!
- isDeprecated: Boolean!
- deprecationReason: String
- }
-
- # Arguments provided to Fields or Directives and the input fields of an
- # InputObject are represented as Input Values which describe their type and
- # optionally a default value.
- type __InputValue {
- name: String!
- description: String
- type: __Type!
- # A GraphQL-formatted string representing the default value for this input value.
- defaultValue: String
- }
-
- # A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all
- # available types and directives on the server, as well as the entry points for
- # query, mutation, and subscription operations.
- type __Schema {
- # A list of all types supported by this server.
- types: [__Type!]!
- # The type that query operations will be rooted at.
- queryType: __Type!
- # If this server supports mutation, the type that mutation operations will be rooted at.
- mutationType: __Type
- # If this server support subscription, the type that subscription operations will be rooted at.
- subscriptionType: __Type
- # A list of all directives supported by this server.
- directives: [__Directive!]!
- }
-
- # The fundamental unit of any GraphQL Schema is the type. There are many kinds of
- # types in GraphQL as represented by the ` + "`" + `__TypeKind` + "`" + ` enum.
- #
- # Depending on the kind of a type, certain fields describe information about that
- # type. Scalar types provide no information beyond a name and description, while
- # Enum types provide their values. Object and Interface types provide the fields
- # they describe. Abstract types, Union and Interface, provide the Object types
- # possible at runtime. List and NonNull types compose other types.
- type __Type {
- kind: __TypeKind!
- name: String
- description: String
- fields(includeDeprecated: Boolean = false): [__Field!]
- interfaces: [__Type!]
- possibleTypes: [__Type!]
- enumValues(includeDeprecated: Boolean = false): [__EnumValue!]
- inputFields: [__InputValue!]
- ofType: __Type
- }
-
- # An enum describing what kind of type a given ` + "`" + `__Type` + "`" + ` is.
- enum __TypeKind {
- # Indicates this type is a scalar.
- SCALAR
- # Indicates this type is an object. ` + "`" + `fields` + "`" + ` and ` + "`" + `interfaces` + "`" + ` are valid fields.
- OBJECT
- # Indicates this type is an interface. ` + "`" + `fields` + "`" + ` and ` + "`" + `possibleTypes` + "`" + ` are valid fields.
- INTERFACE
- # Indicates this type is a union. ` + "`" + `possibleTypes` + "`" + ` is a valid field.
- UNION
- # Indicates this type is an enum. ` + "`" + `enumValues` + "`" + ` is a valid field.
- ENUM
- # Indicates this type is an input object. ` + "`" + `inputFields` + "`" + ` is a valid field.
- INPUT_OBJECT
- # Indicates this type is a list. ` + "`" + `ofType` + "`" + ` is a valid field.
- LIST
- # Indicates this type is a non-null. ` + "`" + `ofType` + "`" + ` is a valid field.
- NON_NULL
- }
-`
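The meta schema above is parsed once at package init and copied into every schema created with New(), so the built-in scalars, introspection types and directives are always resolvable. A brief illustration:

package main

import (
	"fmt"

	"github.com/vektah/gqlgen/neelance/schema"
)

func main() {
	// A fresh schema starts out with the meta types and directives declared above.
	s := schema.New()

	fmt.Println(s.Types["Int"].Kind())           // SCALAR
	fmt.Println(s.Directives["deprecated"].Locs) // [FIELD_DEFINITION ENUM_VALUE]

	// Resolve is the lookup used while type-checking parsed documents.
	fmt.Println(s.Resolve("__Type").Kind()) // OBJECT
}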
diff --git a/vendor/github.com/vektah/gqlgen/neelance/schema/schema.go b/vendor/github.com/vektah/gqlgen/neelance/schema/schema.go
deleted file mode 100644
index 0b1317a5..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/schema/schema.go
+++ /dev/null
@@ -1,489 +0,0 @@
-package schema
-
-import (
- "fmt"
- "strings"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/errors"
-)
-
-type Schema struct {
- EntryPoints map[string]NamedType
- Types map[string]NamedType
- Directives map[string]*DirectiveDecl
-
- entryPointNames map[string]string
- objects []*Object
- unions []*Union
- enums []*Enum
-}
-
-var defaultEntrypoints = map[string]string{
- "query": "Query",
- "mutation": "Mutation",
- "subscription": "Subscription",
-}
-
-func (s *Schema) Resolve(name string) common.Type {
- return s.Types[name]
-}
-
-type NamedType interface {
- common.Type
- TypeName() string
- Description() string
-}
-
-type Scalar struct {
- Name string
- Desc string
-}
-
-type Object struct {
- Name string
- Interfaces []*Interface
- Fields FieldList
- Desc string
-
- interfaceNames []string
-}
-
-type Interface struct {
- Name string
- PossibleTypes []*Object
- Fields FieldList
- Desc string
-}
-
-type Union struct {
- Name string
- PossibleTypes []*Object
- Desc string
-
- typeNames []string
-}
-
-type Enum struct {
- Name string
- Values []*EnumValue
- Desc string
-}
-
-type EnumValue struct {
- Name string
- Directives common.DirectiveList
- Desc string
-}
-
-type InputObject struct {
- Name string
- Desc string
- Values common.InputValueList
-}
-
-type FieldList []*Field
-
-func (l FieldList) Get(name string) *Field {
- for _, f := range l {
- if f.Name == name {
- return f
- }
- }
- return nil
-}
-
-func (l FieldList) Names() []string {
- names := make([]string, len(l))
- for i, f := range l {
- names[i] = f.Name
- }
- return names
-}
-
-type DirectiveDecl struct {
- Name string
- Desc string
- Locs []string
- Args common.InputValueList
-}
-
-func (*Scalar) Kind() string { return "SCALAR" }
-func (*Object) Kind() string { return "OBJECT" }
-func (*Interface) Kind() string { return "INTERFACE" }
-func (*Union) Kind() string { return "UNION" }
-func (*Enum) Kind() string { return "ENUM" }
-func (*InputObject) Kind() string { return "INPUT_OBJECT" }
-
-func (t *Scalar) String() string { return t.Name }
-func (t *Object) String() string { return t.Name }
-func (t *Interface) String() string { return t.Name }
-func (t *Union) String() string { return t.Name }
-func (t *Enum) String() string { return t.Name }
-func (t *InputObject) String() string { return t.Name }
-
-func (t *Scalar) TypeName() string { return t.Name }
-func (t *Object) TypeName() string { return t.Name }
-func (t *Interface) TypeName() string { return t.Name }
-func (t *Union) TypeName() string { return t.Name }
-func (t *Enum) TypeName() string { return t.Name }
-func (t *InputObject) TypeName() string { return t.Name }
-
-func (t *Scalar) Description() string { return t.Desc }
-func (t *Object) Description() string { return t.Desc }
-func (t *Interface) Description() string { return t.Desc }
-func (t *Union) Description() string { return t.Desc }
-func (t *Enum) Description() string { return t.Desc }
-func (t *InputObject) Description() string { return t.Desc }
-
-type Field struct {
- Name string
- Args common.InputValueList
- Type common.Type
- Directives common.DirectiveList
- Desc string
-}
-
-func MustParse(str string) *Schema {
- s := New()
- err := s.Parse(str)
- if err != nil {
- panic(err)
- }
- return s
-}
-
-func New() *Schema {
- s := &Schema{
- entryPointNames: make(map[string]string),
- Types: make(map[string]NamedType),
- Directives: make(map[string]*DirectiveDecl),
- }
- for n, t := range Meta.Types {
- s.Types[n] = t
- }
- for n, d := range Meta.Directives {
- s.Directives[n] = d
- }
- return s
-}
-
-func (s *Schema) Parse(schemaString string) error {
- sc := &scanner.Scanner{
- Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
- }
- sc.Init(strings.NewReader(schemaString))
-
- l := common.New(sc)
- err := l.CatchSyntaxError(func() {
- parseSchema(s, l)
- })
- if err != nil {
- return err
- }
-
- for _, t := range s.Types {
- if err := resolveNamedType(s, t); err != nil {
- return err
- }
- }
- for _, d := range s.Directives {
- for _, arg := range d.Args {
- t, err := common.ResolveType(arg.Type, s.Resolve)
- if err != nil {
- return err
- }
- arg.Type = t
- }
- }
-
- s.EntryPoints = make(map[string]NamedType)
- for key, name := range s.entryPointNames {
- t, ok := s.Types[name]
-		if !ok {
-			return errors.Errorf("type %q not found", name)
-		}
- s.EntryPoints[key] = t
- }
-
- for entrypointName, typeName := range defaultEntrypoints {
- if _, ok := s.EntryPoints[entrypointName]; ok {
- continue
- }
-
- if _, ok := s.Types[typeName]; !ok {
- continue
- }
-
- s.EntryPoints[entrypointName] = s.Types[typeName]
- }
-
- for _, obj := range s.objects {
- obj.Interfaces = make([]*Interface, len(obj.interfaceNames))
- for i, intfName := range obj.interfaceNames {
- t, ok := s.Types[intfName]
- if !ok {
- return errors.Errorf("interface %q not found", intfName)
- }
- intf, ok := t.(*Interface)
- if !ok {
- return errors.Errorf("type %q is not an interface", intfName)
- }
- obj.Interfaces[i] = intf
- intf.PossibleTypes = append(intf.PossibleTypes, obj)
- }
- }
-
- for _, union := range s.unions {
- union.PossibleTypes = make([]*Object, len(union.typeNames))
- for i, name := range union.typeNames {
- t, ok := s.Types[name]
- if !ok {
- return errors.Errorf("object type %q not found", name)
- }
- obj, ok := t.(*Object)
- if !ok {
- return errors.Errorf("type %q is not an object", name)
- }
- union.PossibleTypes[i] = obj
- }
- }
-
- for _, enum := range s.enums {
- for _, value := range enum.Values {
- if err := resolveDirectives(s, value.Directives); err != nil {
- return err
- }
- }
- }
-
- return nil
-}
-
-func resolveNamedType(s *Schema, t NamedType) error {
- switch t := t.(type) {
- case *Object:
- for _, f := range t.Fields {
- if err := resolveField(s, f); err != nil {
- return err
- }
- }
- case *Interface:
- for _, f := range t.Fields {
- if err := resolveField(s, f); err != nil {
- return err
- }
- }
- case *InputObject:
- if err := resolveInputObject(s, t.Values); err != nil {
- return err
- }
- }
- return nil
-}
-
-func resolveField(s *Schema, f *Field) error {
- t, err := common.ResolveType(f.Type, s.Resolve)
- if err != nil {
- return err
- }
- f.Type = t
- if err := resolveDirectives(s, f.Directives); err != nil {
- return err
- }
- return resolveInputObject(s, f.Args)
-}
-
-func resolveDirectives(s *Schema, directives common.DirectiveList) error {
- for _, d := range directives {
- dirName := d.Name.Name
- dd, ok := s.Directives[dirName]
- if !ok {
- return errors.Errorf("directive %q not found", dirName)
- }
- for _, arg := range d.Args {
- if dd.Args.Get(arg.Name.Name) == nil {
- return errors.Errorf("invalid argument %q for directive %q", arg.Name.Name, dirName)
- }
- }
- for _, arg := range dd.Args {
- if _, ok := d.Args.Get(arg.Name.Name); !ok {
- d.Args = append(d.Args, common.Argument{Name: arg.Name, Value: arg.Default})
- }
- }
- }
- return nil
-}
-
-func resolveInputObject(s *Schema, values common.InputValueList) error {
- for _, v := range values {
- t, err := common.ResolveType(v.Type, s.Resolve)
- if err != nil {
- return err
- }
- v.Type = t
- }
- return nil
-}
-
-func parseSchema(s *Schema, l *common.Lexer) {
- for l.Peek() != scanner.EOF {
- desc := l.DescComment()
- switch x := l.ConsumeIdent(); x {
- case "schema":
- l.ConsumeToken('{')
- for l.Peek() != '}' {
- name := l.ConsumeIdent()
- l.ConsumeToken(':')
- typ := l.ConsumeIdent()
- s.entryPointNames[name] = typ
- }
- l.ConsumeToken('}')
- case "type":
- obj := parseObjectDecl(l)
- obj.Desc = desc
- s.Types[obj.Name] = obj
- s.objects = append(s.objects, obj)
- case "interface":
- intf := parseInterfaceDecl(l)
- intf.Desc = desc
- s.Types[intf.Name] = intf
- case "union":
- union := parseUnionDecl(l)
- union.Desc = desc
- s.Types[union.Name] = union
- s.unions = append(s.unions, union)
- case "enum":
- enum := parseEnumDecl(l)
- enum.Desc = desc
- s.Types[enum.Name] = enum
- s.enums = append(s.enums, enum)
- case "input":
- input := parseInputDecl(l)
- input.Desc = desc
- s.Types[input.Name] = input
- case "scalar":
- name := l.ConsumeIdent()
- s.Types[name] = &Scalar{Name: name, Desc: desc}
- case "directive":
- directive := parseDirectiveDecl(l)
- directive.Desc = desc
- s.Directives[directive.Name] = directive
- default:
- l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "schema", "type", "enum", "interface", "union", "input", "scalar" or "directive"`, x))
- }
- }
-}
-
-func parseObjectDecl(l *common.Lexer) *Object {
- o := &Object{}
- o.Name = l.ConsumeIdent()
- if l.Peek() == scanner.Ident {
- l.ConsumeKeyword("implements")
- for {
- o.interfaceNames = append(o.interfaceNames, l.ConsumeIdent())
- if l.Peek() == '{' {
- break
- }
- }
- }
- l.ConsumeToken('{')
- o.Fields = parseFields(l)
- l.ConsumeToken('}')
- return o
-}
-
-func parseInterfaceDecl(l *common.Lexer) *Interface {
- i := &Interface{}
- i.Name = l.ConsumeIdent()
- l.ConsumeToken('{')
- i.Fields = parseFields(l)
- l.ConsumeToken('}')
- return i
-}
-
-func parseUnionDecl(l *common.Lexer) *Union {
- union := &Union{}
- union.Name = l.ConsumeIdent()
- l.ConsumeToken('=')
- union.typeNames = []string{l.ConsumeIdent()}
- for l.Peek() == '|' {
- l.ConsumeToken('|')
- union.typeNames = append(union.typeNames, l.ConsumeIdent())
- }
- return union
-}
-
-func parseInputDecl(l *common.Lexer) *InputObject {
- i := &InputObject{}
- i.Name = l.ConsumeIdent()
- l.ConsumeToken('{')
- for l.Peek() != '}' {
- i.Values = append(i.Values, common.ParseInputValue(l))
- }
- l.ConsumeToken('}')
- return i
-}
-
-func parseEnumDecl(l *common.Lexer) *Enum {
- enum := &Enum{}
- enum.Name = l.ConsumeIdent()
- l.ConsumeToken('{')
- for l.Peek() != '}' {
- v := &EnumValue{}
- v.Desc = l.DescComment()
- v.Name = l.ConsumeIdent()
- v.Directives = common.ParseDirectives(l)
- enum.Values = append(enum.Values, v)
- }
- l.ConsumeToken('}')
- return enum
-}
-
-func parseDirectiveDecl(l *common.Lexer) *DirectiveDecl {
- d := &DirectiveDecl{}
- l.ConsumeToken('@')
- d.Name = l.ConsumeIdent()
- if l.Peek() == '(' {
- l.ConsumeToken('(')
- for l.Peek() != ')' {
- v := common.ParseInputValue(l)
- d.Args = append(d.Args, v)
- }
- l.ConsumeToken(')')
- }
- l.ConsumeKeyword("on")
- for {
- loc := l.ConsumeIdent()
- d.Locs = append(d.Locs, loc)
- if l.Peek() != '|' {
- break
- }
- l.ConsumeToken('|')
- }
- return d
-}
-
-func parseFields(l *common.Lexer) FieldList {
- var fields FieldList
- for l.Peek() != '}' {
- f := &Field{}
- f.Desc = l.DescComment()
- f.Name = l.ConsumeIdent()
- if l.Peek() == '(' {
- l.ConsumeToken('(')
- for l.Peek() != ')' {
- f.Args = append(f.Args, common.ParseInputValue(l))
- }
- l.ConsumeToken(')')
- }
- l.ConsumeToken(':')
- f.Type = common.ParseType(l)
- f.Directives = common.ParseDirectives(l)
- fields = append(fields, f)
- }
- return fields
-}
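A sketch of building a schema with the parser above and reading the resolved relationships back out (the Node/User types are invented for the example):

package main

import (
	"fmt"

	"github.com/vektah/gqlgen/neelance/schema"
)

func main() {
	s := schema.New()
	err := s.Parse(`
		schema { query: Query }
		interface Node { id: ID! }
		type Query { node(id: ID!): Node }
		type User implements Node { id: ID! name: String! }
	`)
	if err != nil {
		panic(err)
	}

	fmt.Println("query root:", s.EntryPoints["query"].TypeName())

	// Interfaces collect their possible object types while Parse resolves the schema.
	node := s.Types["Node"].(*schema.Interface)
	for _, obj := range node.PossibleTypes {
		fmt.Println("implements Node:", obj.Name)
	}
}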
diff --git a/vendor/github.com/vektah/gqlgen/neelance/validation/suggestion.go b/vendor/github.com/vektah/gqlgen/neelance/validation/suggestion.go
deleted file mode 100644
index 9702b5f5..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/validation/suggestion.go
+++ /dev/null
@@ -1,71 +0,0 @@
-package validation
-
-import (
- "fmt"
- "sort"
- "strconv"
- "strings"
-)
-
-func makeSuggestion(prefix string, options []string, input string) string {
- var selected []string
- distances := make(map[string]int)
- for _, opt := range options {
- distance := levenshteinDistance(input, opt)
- threshold := max(len(input)/2, max(len(opt)/2, 1))
- if distance < threshold {
- selected = append(selected, opt)
- distances[opt] = distance
- }
- }
-
- if len(selected) == 0 {
- return ""
- }
- sort.Slice(selected, func(i, j int) bool {
- return distances[selected[i]] < distances[selected[j]]
- })
-
- parts := make([]string, len(selected))
- for i, opt := range selected {
- parts[i] = strconv.Quote(opt)
- }
- if len(parts) > 1 {
- parts[len(parts)-1] = "or " + parts[len(parts)-1]
- }
- return fmt.Sprintf(" %s %s?", prefix, strings.Join(parts, ", "))
-}
-
-func levenshteinDistance(s1, s2 string) int {
- column := make([]int, len(s1)+1)
- for y := range s1 {
- column[y+1] = y + 1
- }
- for x, rx := range s2 {
- column[0] = x + 1
- lastdiag := x
- for y, ry := range s1 {
- olddiag := column[y+1]
- if rx != ry {
- lastdiag++
- }
- column[y+1] = min(column[y+1]+1, min(column[y]+1, lastdiag))
- lastdiag = olddiag
- }
- }
- return column[len(s1)]
-}
-
-func min(a, b int) int {
- if a < b {
- return a
- }
- return b
-}
-
-func max(a, b int) int {
- if a > b {
- return a
- }
- return b
-}
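Both helpers above are unexported, so any example has to live in the same package. A hypothetical companion test, relying only on the behaviour shown here:

package validation

import "testing"

// Hypothetical test for the unexported suggestion helpers above.
func TestMakeSuggestion(t *testing.T) {
	// "nme" is one edit away from "name".
	if d := levenshteinDistance("nme", "name"); d != 1 {
		t.Fatalf("expected distance 1, got %d", d)
	}

	// A close misspelling should produce a non-empty suggestion string.
	if got := makeSuggestion("Did you mean", []string{"name", "email"}, "nme"); got == "" {
		t.Fatal("expected a suggestion for a close misspelling")
	}
}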
diff --git a/vendor/github.com/vektah/gqlgen/neelance/validation/validation.go b/vendor/github.com/vektah/gqlgen/neelance/validation/validation.go
deleted file mode 100644
index 28124310..00000000
--- a/vendor/github.com/vektah/gqlgen/neelance/validation/validation.go
+++ /dev/null
@@ -1,861 +0,0 @@
-package validation
-
-import (
- "fmt"
- "math"
- "reflect"
- "strconv"
- "strings"
- "text/scanner"
-
- "github.com/vektah/gqlgen/neelance/common"
- "github.com/vektah/gqlgen/neelance/errors"
- "github.com/vektah/gqlgen/neelance/query"
- "github.com/vektah/gqlgen/neelance/schema"
-)
-
-type varSet map[*common.InputValue]struct{}
-
-type selectionPair struct{ a, b query.Selection }
-
-type fieldInfo struct {
- sf *schema.Field
- parent schema.NamedType
-}
-
-type context struct {
- schema *schema.Schema
- doc *query.Document
- errs []*errors.QueryError
- opErrs map[*query.Operation][]*errors.QueryError
- usedVars map[*query.Operation]varSet
- fieldMap map[*query.Field]fieldInfo
- overlapValidated map[selectionPair]struct{}
-}
-
-func (c *context) addErr(loc errors.Location, rule string, format string, a ...interface{}) {
- c.addErrMultiLoc([]errors.Location{loc}, rule, format, a...)
-}
-
-func (c *context) addErrMultiLoc(locs []errors.Location, rule string, format string, a ...interface{}) {
- c.errs = append(c.errs, &errors.QueryError{
- Message: fmt.Sprintf(format, a...),
- Locations: locs,
- Rule: rule,
- })
-}
-
-type opContext struct {
- *context
- ops []*query.Operation
-}
-
-func Validate(s *schema.Schema, doc *query.Document) []*errors.QueryError {
- c := &context{
- schema: s,
- doc: doc,
- opErrs: make(map[*query.Operation][]*errors.QueryError),
- usedVars: make(map[*query.Operation]varSet),
- fieldMap: make(map[*query.Field]fieldInfo),
- overlapValidated: make(map[selectionPair]struct{}),
- }
-
- opNames := make(nameSet)
- fragUsedBy := make(map[*query.FragmentDecl][]*query.Operation)
- for _, op := range doc.Operations {
- c.usedVars[op] = make(varSet)
- opc := &opContext{c, []*query.Operation{op}}
-
- if op.Name.Name == "" && len(doc.Operations) != 1 {
- c.addErr(op.Loc, "LoneAnonymousOperation", "This anonymous operation must be the only defined operation.")
- }
- if op.Name.Name != "" {
- validateName(c, opNames, op.Name, "UniqueOperationNames", "operation")
- }
-
- validateDirectives(opc, string(op.Type), op.Directives)
-
- varNames := make(nameSet)
- for _, v := range op.Vars {
- validateName(c, varNames, v.Name, "UniqueVariableNames", "variable")
-
- t := resolveType(c, v.Type)
- if !canBeInput(t) {
- c.addErr(v.TypeLoc, "VariablesAreInputTypes", "Variable %q cannot be non-input type %q.", "$"+v.Name.Name, t)
- }
-
- if v.Default != nil {
- validateLiteral(opc, v.Default)
-
- if t != nil {
- if nn, ok := t.(*common.NonNull); ok {
- c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q is required and will not use the default value. Perhaps you meant to use type %q.", "$"+v.Name.Name, t, nn.OfType)
- }
-
- if ok, reason := validateValueType(opc, v.Default, t); !ok {
- c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q has invalid default value %s.\n%s", "$"+v.Name.Name, t, v.Default, reason)
- }
- }
- }
- }
-
- var entryPoint schema.NamedType
- switch op.Type {
- case query.Query:
- entryPoint = s.EntryPoints["query"]
- case query.Mutation:
- entryPoint = s.EntryPoints["mutation"]
- case query.Subscription:
- entryPoint = s.EntryPoints["subscription"]
- default:
- panic("unreachable")
- }
-
- validateSelectionSet(opc, op.Selections, entryPoint)
-
- fragUsed := make(map[*query.FragmentDecl]struct{})
- markUsedFragments(c, op.Selections, fragUsed)
- for frag := range fragUsed {
- fragUsedBy[frag] = append(fragUsedBy[frag], op)
- }
- }
-
- fragNames := make(nameSet)
- fragVisited := make(map[*query.FragmentDecl]struct{})
- for _, frag := range doc.Fragments {
- opc := &opContext{c, fragUsedBy[frag]}
-
- validateName(c, fragNames, frag.Name, "UniqueFragmentNames", "fragment")
- validateDirectives(opc, "FRAGMENT_DEFINITION", frag.Directives)
-
- t := unwrapType(resolveType(c, &frag.On))
- // continue even if t is nil
- if t != nil && !canBeFragment(t) {
- c.addErr(frag.On.Loc, "FragmentsOnCompositeTypes", "Fragment %q cannot condition on non composite type %q.", frag.Name.Name, t)
- continue
- }
-
- validateSelectionSet(opc, frag.Selections, t)
-
- if _, ok := fragVisited[frag]; !ok {
- detectFragmentCycle(c, frag.Selections, fragVisited, nil, map[string]int{frag.Name.Name: 0})
- }
- }
-
- for _, frag := range doc.Fragments {
- if len(fragUsedBy[frag]) == 0 {
- c.addErr(frag.Loc, "NoUnusedFragments", "Fragment %q is never used.", frag.Name.Name)
- }
- }
-
- for _, op := range doc.Operations {
- c.errs = append(c.errs, c.opErrs[op]...)
-
- opUsedVars := c.usedVars[op]
- for _, v := range op.Vars {
- if _, ok := opUsedVars[v]; !ok {
- opSuffix := ""
- if op.Name.Name != "" {
- opSuffix = fmt.Sprintf(" in operation %q", op.Name.Name)
- }
- c.addErr(v.Loc, "NoUnusedVariables", "Variable %q is never used%s.", "$"+v.Name.Name, opSuffix)
- }
- }
- }
-
- return c.errs
-}
-
-func validateSelectionSet(c *opContext, sels []query.Selection, t schema.NamedType) {
- for _, sel := range sels {
- validateSelection(c, sel, t)
- }
-
- for i, a := range sels {
- for _, b := range sels[i+1:] {
- c.validateOverlap(a, b, nil, nil)
- }
- }
-}
-
-func validateSelection(c *opContext, sel query.Selection, t schema.NamedType) {
- switch sel := sel.(type) {
- case *query.Field:
- validateDirectives(c, "FIELD", sel.Directives)
-
- fieldName := sel.Name.Name
- var f *schema.Field
- switch fieldName {
- case "__typename":
- f = &schema.Field{
- Name: "__typename",
- Type: c.schema.Types["String"],
- }
- case "__schema":
- f = &schema.Field{
- Name: "__schema",
- Type: c.schema.Types["__Schema"],
- }
- case "__type":
- f = &schema.Field{
- Name: "__type",
- Args: common.InputValueList{
- &common.InputValue{
- Name: common.Ident{Name: "name"},
- Type: &common.NonNull{OfType: c.schema.Types["String"]},
- },
- },
- Type: c.schema.Types["__Type"],
- }
- default:
- f = fields(t).Get(fieldName)
- if f == nil && t != nil {
- suggestion := makeSuggestion("Did you mean", fields(t).Names(), fieldName)
- c.addErr(sel.Alias.Loc, "FieldsOnCorrectType", "Cannot query field %q on type %q.%s", fieldName, t, suggestion)
- }
- }
- c.fieldMap[sel] = fieldInfo{sf: f, parent: t}
-
- validateArgumentLiterals(c, sel.Arguments)
- if f != nil {
- validateArgumentTypes(c, sel.Arguments, f.Args, sel.Alias.Loc,
- func() string { return fmt.Sprintf("field %q of type %q", fieldName, t) },
- func() string { return fmt.Sprintf("Field %q", fieldName) },
- )
- }
-
- var ft common.Type
- if f != nil {
- ft = f.Type
- sf := hasSubfields(ft)
- if sf && sel.Selections == nil {
- c.addErr(sel.Alias.Loc, "ScalarLeafs", "Field %q of type %q must have a selection of subfields. Did you mean \"%s { ... }\"?", fieldName, ft, fieldName)
- }
- if !sf && sel.Selections != nil {
- c.addErr(sel.SelectionSetLoc, "ScalarLeafs", "Field %q must not have a selection since type %q has no subfields.", fieldName, ft)
- }
- }
- if sel.Selections != nil {
- validateSelectionSet(c, sel.Selections, unwrapType(ft))
- }
-
- case *query.InlineFragment:
- validateDirectives(c, "INLINE_FRAGMENT", sel.Directives)
- if sel.On.Name != "" {
- fragTyp := unwrapType(resolveType(c.context, &sel.On))
- if fragTyp != nil && !compatible(t, fragTyp) {
- c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment cannot be spread here as objects of type %q can never be of type %q.", t, fragTyp)
- }
- t = fragTyp
- // continue even if t is nil
- }
- if t != nil && !canBeFragment(t) {
- c.addErr(sel.On.Loc, "FragmentsOnCompositeTypes", "Fragment cannot condition on non composite type %q.", t)
- return
- }
- validateSelectionSet(c, sel.Selections, unwrapType(t))
-
- case *query.FragmentSpread:
- validateDirectives(c, "FRAGMENT_SPREAD", sel.Directives)
- frag := c.doc.Fragments.Get(sel.Name.Name)
- if frag == nil {
- c.addErr(sel.Name.Loc, "KnownFragmentNames", "Unknown fragment %q.", sel.Name.Name)
- return
- }
- fragTyp := c.schema.Types[frag.On.Name]
- if !compatible(t, fragTyp) {
- c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment %q cannot be spread here as objects of type %q can never be of type %q.", frag.Name.Name, t, fragTyp)
- }
-
- default:
- panic("unreachable")
- }
-}
-
-func compatible(a, b common.Type) bool {
- for _, pta := range possibleTypes(a) {
- for _, ptb := range possibleTypes(b) {
- if pta == ptb {
- return true
- }
- }
- }
- return false
-}
-
-func possibleTypes(t common.Type) []*schema.Object {
- switch t := t.(type) {
- case *schema.Object:
- return []*schema.Object{t}
- case *schema.Interface:
- return t.PossibleTypes
- case *schema.Union:
- return t.PossibleTypes
- default:
- return nil
- }
-}
-
-func markUsedFragments(c *context, sels []query.Selection, fragUsed map[*query.FragmentDecl]struct{}) {
- for _, sel := range sels {
- switch sel := sel.(type) {
- case *query.Field:
- if sel.Selections != nil {
- markUsedFragments(c, sel.Selections, fragUsed)
- }
-
- case *query.InlineFragment:
- markUsedFragments(c, sel.Selections, fragUsed)
-
- case *query.FragmentSpread:
- frag := c.doc.Fragments.Get(sel.Name.Name)
- if frag == nil {
- return
- }
-
- if _, ok := fragUsed[frag]; ok {
- return
- }
- fragUsed[frag] = struct{}{}
- markUsedFragments(c, frag.Selections, fragUsed)
-
- default:
- panic("unreachable")
- }
- }
-}
-
-func detectFragmentCycle(c *context, sels []query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) {
- for _, sel := range sels {
- detectFragmentCycleSel(c, sel, fragVisited, spreadPath, spreadPathIndex)
- }
-}
-
-func detectFragmentCycleSel(c *context, sel query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) {
- switch sel := sel.(type) {
- case *query.Field:
- if sel.Selections != nil {
- detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex)
- }
-
- case *query.InlineFragment:
- detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex)
-
- case *query.FragmentSpread:
- frag := c.doc.Fragments.Get(sel.Name.Name)
- if frag == nil {
- return
- }
-
- spreadPath = append(spreadPath, sel)
- if i, ok := spreadPathIndex[frag.Name.Name]; ok {
- cyclePath := spreadPath[i:]
- via := ""
- if len(cyclePath) > 1 {
- names := make([]string, len(cyclePath)-1)
- for i, frag := range cyclePath[:len(cyclePath)-1] {
- names[i] = frag.Name.Name
- }
- via = " via " + strings.Join(names, ", ")
- }
-
- locs := make([]errors.Location, len(cyclePath))
- for i, frag := range cyclePath {
- locs[i] = frag.Loc
- }
- c.addErrMultiLoc(locs, "NoFragmentCycles", "Cannot spread fragment %q within itself%s.", frag.Name.Name, via)
- return
- }
-
- if _, ok := fragVisited[frag]; ok {
- return
- }
- fragVisited[frag] = struct{}{}
-
- spreadPathIndex[frag.Name.Name] = len(spreadPath)
- detectFragmentCycle(c, frag.Selections, fragVisited, spreadPath, spreadPathIndex)
- delete(spreadPathIndex, frag.Name.Name)
-
- default:
- panic("unreachable")
- }
-}
-
-func (c *context) validateOverlap(a, b query.Selection, reasons *[]string, locs *[]errors.Location) {
- if a == b {
- return
- }
-
- if _, ok := c.overlapValidated[selectionPair{a, b}]; ok {
- return
- }
- c.overlapValidated[selectionPair{a, b}] = struct{}{}
- c.overlapValidated[selectionPair{b, a}] = struct{}{}
-
- switch a := a.(type) {
- case *query.Field:
- switch b := b.(type) {
- case *query.Field:
- if b.Alias.Loc.Before(a.Alias.Loc) {
- a, b = b, a
- }
- if reasons2, locs2 := c.validateFieldOverlap(a, b); len(reasons2) != 0 {
- locs2 = append(locs2, a.Alias.Loc, b.Alias.Loc)
- if reasons == nil {
- c.addErrMultiLoc(locs2, "OverlappingFieldsCanBeMerged", "Fields %q conflict because %s. Use different aliases on the fields to fetch both if this was intentional.", a.Alias.Name, strings.Join(reasons2, " and "))
- return
- }
- for _, r := range reasons2 {
- *reasons = append(*reasons, fmt.Sprintf("subfields %q conflict because %s", a.Alias.Name, r))
- }
- *locs = append(*locs, locs2...)
- }
-
- case *query.InlineFragment:
- for _, sel := range b.Selections {
- c.validateOverlap(a, sel, reasons, locs)
- }
-
- case *query.FragmentSpread:
- if frag := c.doc.Fragments.Get(b.Name.Name); frag != nil {
- for _, sel := range frag.Selections {
- c.validateOverlap(a, sel, reasons, locs)
- }
- }
-
- default:
- panic("unreachable")
- }
-
- case *query.InlineFragment:
- for _, sel := range a.Selections {
- c.validateOverlap(sel, b, reasons, locs)
- }
-
- case *query.FragmentSpread:
- if frag := c.doc.Fragments.Get(a.Name.Name); frag != nil {
- for _, sel := range frag.Selections {
- c.validateOverlap(sel, b, reasons, locs)
- }
- }
-
- default:
- panic("unreachable")
- }
-}
-
-func (c *context) validateFieldOverlap(a, b *query.Field) ([]string, []errors.Location) {
- if a.Alias.Name != b.Alias.Name {
- return nil, nil
- }
-
- if asf := c.fieldMap[a].sf; asf != nil {
- if bsf := c.fieldMap[b].sf; bsf != nil {
- if !typesCompatible(asf.Type, bsf.Type) {
- return []string{fmt.Sprintf("they return conflicting types %s and %s", asf.Type, bsf.Type)}, nil
- }
- }
- }
-
- at := c.fieldMap[a].parent
- bt := c.fieldMap[b].parent
- if at == nil || bt == nil || at == bt {
- if a.Name.Name != b.Name.Name {
- return []string{fmt.Sprintf("%s and %s are different fields", a.Name.Name, b.Name.Name)}, nil
- }
-
- if argumentsConflict(a.Arguments, b.Arguments) {
- return []string{"they have differing arguments"}, nil
- }
- }
-
- var reasons []string
- var locs []errors.Location
- for _, a2 := range a.Selections {
- for _, b2 := range b.Selections {
- c.validateOverlap(a2, b2, &reasons, &locs)
- }
- }
- return reasons, locs
-}
-
-func argumentsConflict(a, b common.ArgumentList) bool {
- if len(a) != len(b) {
- return true
- }
- for _, argA := range a {
- valB, ok := b.Get(argA.Name.Name)
- if !ok || !reflect.DeepEqual(argA.Value.Value(nil), valB.Value(nil)) {
- return true
- }
- }
- return false
-}
-
-func fields(t common.Type) schema.FieldList {
- switch t := t.(type) {
- case *schema.Object:
- return t.Fields
- case *schema.Interface:
- return t.Fields
- default:
- return nil
- }
-}
-
-func unwrapType(t common.Type) schema.NamedType {
- if t == nil {
- return nil
- }
- for {
- switch t2 := t.(type) {
- case schema.NamedType:
- return t2
- case *common.List:
- t = t2.OfType
- case *common.NonNull:
- t = t2.OfType
- default:
- panic("unreachable")
- }
- }
-}
-
-func resolveType(c *context, t common.Type) common.Type {
- t2, err := common.ResolveType(t, c.schema.Resolve)
- if err != nil {
- c.errs = append(c.errs, err)
- }
- return t2
-}
-
-func validateDirectives(c *opContext, loc string, directives common.DirectiveList) {
- directiveNames := make(nameSet)
- for _, d := range directives {
- dirName := d.Name.Name
- validateNameCustomMsg(c.context, directiveNames, d.Name, "UniqueDirectivesPerLocation", func() string {
- return fmt.Sprintf("The directive %q can only be used once at this location.", dirName)
- })
-
- validateArgumentLiterals(c, d.Args)
-
- dd, ok := c.schema.Directives[dirName]
- if !ok {
- c.addErr(d.Name.Loc, "KnownDirectives", "Unknown directive %q.", dirName)
- continue
- }
-
- locOK := false
- for _, allowedLoc := range dd.Locs {
- if loc == allowedLoc {
- locOK = true
- break
- }
- }
- if !locOK {
- c.addErr(d.Name.Loc, "KnownDirectives", "Directive %q may not be used on %s.", dirName, loc)
- }
-
- validateArgumentTypes(c, d.Args, dd.Args, d.Name.Loc,
- func() string { return fmt.Sprintf("directive %q", "@"+dirName) },
- func() string { return fmt.Sprintf("Directive %q", "@"+dirName) },
- )
- }
-}
-
-type nameSet map[string]errors.Location
-
-func validateName(c *context, set nameSet, name common.Ident, rule string, kind string) {
- validateNameCustomMsg(c, set, name, rule, func() string {
- return fmt.Sprintf("There can be only one %s named %q.", kind, name.Name)
- })
-}
-
-func validateNameCustomMsg(c *context, set nameSet, name common.Ident, rule string, msg func() string) {
- if loc, ok := set[name.Name]; ok {
- c.addErrMultiLoc([]errors.Location{loc, name.Loc}, rule, msg())
- return
- }
- set[name.Name] = name.Loc
-}
-
-func validateArgumentTypes(c *opContext, args common.ArgumentList, argDecls common.InputValueList, loc errors.Location, owner1, owner2 func() string) {
- for _, selArg := range args {
- arg := argDecls.Get(selArg.Name.Name)
- if arg == nil {
- c.addErr(selArg.Name.Loc, "KnownArgumentNames", "Unknown argument %q on %s.", selArg.Name.Name, owner1())
- continue
- }
- value := selArg.Value
- if ok, reason := validateValueType(c, value, arg.Type); !ok {
- c.addErr(value.Location(), "ArgumentsOfCorrectType", "Argument %q has invalid value %s.\n%s", arg.Name.Name, value, reason)
- }
- }
- for _, decl := range argDecls {
- if _, ok := decl.Type.(*common.NonNull); ok {
- if _, ok := args.Get(decl.Name.Name); !ok {
- c.addErr(loc, "ProvidedNonNullArguments", "%s argument %q of type %q is required but not provided.", owner2(), decl.Name.Name, decl.Type)
- }
- }
- }
-}
-
-func validateArgumentLiterals(c *opContext, args common.ArgumentList) {
- argNames := make(nameSet)
- for _, arg := range args {
- validateName(c.context, argNames, arg.Name, "UniqueArgumentNames", "argument")
- validateLiteral(c, arg.Value)
- }
-}
-
-func validateLiteral(c *opContext, l common.Literal) {
- switch l := l.(type) {
- case *common.ObjectLit:
- fieldNames := make(nameSet)
- for _, f := range l.Fields {
- validateName(c.context, fieldNames, f.Name, "UniqueInputFieldNames", "input field")
- validateLiteral(c, f.Value)
- }
- case *common.ListLit:
- for _, entry := range l.Entries {
- validateLiteral(c, entry)
- }
- case *common.Variable:
- for _, op := range c.ops {
- v := op.Vars.Get(l.Name)
- if v == nil {
- byOp := ""
- if op.Name.Name != "" {
- byOp = fmt.Sprintf(" by operation %q", op.Name.Name)
- }
- c.opErrs[op] = append(c.opErrs[op], &errors.QueryError{
- Message: fmt.Sprintf("Variable %q is not defined%s.", "$"+l.Name, byOp),
- Locations: []errors.Location{l.Loc, op.Loc},
- Rule: "NoUndefinedVariables",
- })
- continue
- }
- c.usedVars[op][v] = struct{}{}
- }
- }
-}
-
-func validateValueType(c *opContext, v common.Literal, t common.Type) (bool, string) {
- if v, ok := v.(*common.Variable); ok {
- for _, op := range c.ops {
- if v2 := op.Vars.Get(v.Name); v2 != nil {
- t2, err := common.ResolveType(v2.Type, c.schema.Resolve)
- if _, ok := t2.(*common.NonNull); !ok && v2.Default != nil {
- t2 = &common.NonNull{OfType: t2}
- }
- if err == nil && !typeCanBeUsedAs(t2, t) {
- c.addErrMultiLoc([]errors.Location{v2.Loc, v.Loc}, "VariablesInAllowedPosition", "Variable %q of type %q used in position expecting type %q.", "$"+v.Name, t2, t)
- }
- }
- }
- return true, ""
- }
-
- if nn, ok := t.(*common.NonNull); ok {
- if isNull(v) {
- return false, fmt.Sprintf("Expected %q, found null.", t)
- }
- t = nn.OfType
- }
- if isNull(v) {
- return true, ""
- }
-
- switch t := t.(type) {
- case *schema.Scalar, *schema.Enum:
- if lit, ok := v.(*common.BasicLit); ok {
- if validateBasicLit(lit, t) {
- return true, ""
- }
- } else {
- // custom complex scalars will be validated when unmarshaling
- return true, ""
- }
-
- case *common.List:
- list, ok := v.(*common.ListLit)
- if !ok {
- return validateValueType(c, v, t.OfType) // single value instead of list
- }
- for i, entry := range list.Entries {
- if ok, reason := validateValueType(c, entry, t.OfType); !ok {
- return false, fmt.Sprintf("In element #%d: %s", i, reason)
- }
- }
- return true, ""
-
- case *schema.InputObject:
- v, ok := v.(*common.ObjectLit)
- if !ok {
- return false, fmt.Sprintf("Expected %q, found not an object.", t)
- }
- for _, f := range v.Fields {
- name := f.Name.Name
- iv := t.Values.Get(name)
- if iv == nil {
- return false, fmt.Sprintf("In field %q: Unknown field.", name)
- }
- if ok, reason := validateValueType(c, f.Value, iv.Type); !ok {
- return false, fmt.Sprintf("In field %q: %s", name, reason)
- }
- }
- for _, iv := range t.Values {
- found := false
- for _, f := range v.Fields {
- if f.Name.Name == iv.Name.Name {
- found = true
- break
- }
- }
- if !found {
- if _, ok := iv.Type.(*common.NonNull); ok && iv.Default == nil {
- return false, fmt.Sprintf("In field %q: Expected %q, found null.", iv.Name.Name, iv.Type)
- }
- }
- }
- return true, ""
- }
-
- return false, fmt.Sprintf("Expected type %q, found %s.", t, v)
-}
-
-func validateBasicLit(v *common.BasicLit, t common.Type) bool {
- switch t := t.(type) {
- case *schema.Scalar:
- switch t.Name {
- case "Int":
- if v.Type != scanner.Int {
- return false
- }
- f, err := strconv.ParseFloat(v.Text, 64)
- if err != nil {
- panic(err)
- }
- return f >= math.MinInt32 && f <= math.MaxInt32
- case "Float":
- return v.Type == scanner.Int || v.Type == scanner.Float
- case "String":
- return v.Type == scanner.String
- case "Boolean":
- return v.Type == scanner.Ident && (v.Text == "true" || v.Text == "false")
- case "ID":
- return v.Type == scanner.Int || v.Type == scanner.String
- default:
- //TODO: Type-check against expected type by Unmarshaling
- return true
- }
-
- case *schema.Enum:
- if v.Type != scanner.Ident {
- return false
- }
- for _, option := range t.Values {
- if option.Name == v.Text {
- return true
- }
- }
- return false
- }
-
- return false
-}
-
-func canBeFragment(t common.Type) bool {
- switch t.(type) {
- case *schema.Object, *schema.Interface, *schema.Union:
- return true
- default:
- return false
- }
-}
-
-func canBeInput(t common.Type) bool {
- switch t := t.(type) {
- case *schema.InputObject, *schema.Scalar, *schema.Enum:
- return true
- case *common.List:
- return canBeInput(t.OfType)
- case *common.NonNull:
- return canBeInput(t.OfType)
- default:
- return false
- }
-}
-
-func hasSubfields(t common.Type) bool {
- switch t := t.(type) {
- case *schema.Object, *schema.Interface, *schema.Union:
- return true
- case *common.List:
- return hasSubfields(t.OfType)
- case *common.NonNull:
- return hasSubfields(t.OfType)
- default:
- return false
- }
-}
-
-func isLeaf(t common.Type) bool {
- switch t.(type) {
- case *schema.Scalar, *schema.Enum:
- return true
- default:
- return false
- }
-}
-
-func isNull(lit interface{}) bool {
- _, ok := lit.(*common.NullLit)
- return ok
-}
-
-func typesCompatible(a, b common.Type) bool {
- al, aIsList := a.(*common.List)
- bl, bIsList := b.(*common.List)
- if aIsList || bIsList {
- return aIsList && bIsList && typesCompatible(al.OfType, bl.OfType)
- }
-
- ann, aIsNN := a.(*common.NonNull)
- bnn, bIsNN := b.(*common.NonNull)
- if aIsNN || bIsNN {
- return aIsNN && bIsNN && typesCompatible(ann.OfType, bnn.OfType)
- }
-
- if isLeaf(a) || isLeaf(b) {
- return a == b
- }
-
- return true
-}
-
-func typeCanBeUsedAs(t, as common.Type) bool {
- nnT, okT := t.(*common.NonNull)
- if okT {
- t = nnT.OfType
- }
-
- nnAs, okAs := as.(*common.NonNull)
- if okAs {
- as = nnAs.OfType
- if !okT {
- return false // nullable can not be used as non-null
- }
- }
-
- if t == as {
- return true
- }
-
- if lT, ok := t.(*common.List); ok {
- if lAs, ok := as.(*common.List); ok {
- return typeCanBeUsedAs(lT.OfType, lAs.OfType)
- }
- }
- return false
-}
diff --git a/vendor/github.com/vektah/gqlparser/.gitignore b/vendor/github.com/vektah/gqlparser/.gitignore
new file mode 100644
index 00000000..877392a7
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/.gitignore
@@ -0,0 +1,5 @@
+/vendor
+/validator/imported/node_modules
+/validator/imported/graphql-js
+
+.idea/
diff --git a/vendor/github.com/vektah/gqlparser/.gometalinter.json b/vendor/github.com/vektah/gqlparser/.gometalinter.json
new file mode 100644
index 00000000..e4e00223
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/.gometalinter.json
@@ -0,0 +1,13 @@
+{
+ "sort": ["path"],
+ "Deadline": "5m",
+ "Linters": {
+ "errcheck": {
+ "Command": "errcheck -abspath -ignore '[rR]ead|[wW]rite|Close'",
+ "Pattern": "PATH:LINE:COL:MESSAGE",
+ "InstallFrom": "github.com/kisielk/errcheck",
+ "PartitionStrategy": "packages"
+ }
+ },
+ "Disable": ["golint","gocyclo", "goconst", "gas", "interfacer", "vet","gosec"]
+}
diff --git a/vendor/github.com/vektah/gqlparser/LICENSE b/vendor/github.com/vektah/gqlparser/LICENSE
new file mode 100644
index 00000000..1221b9d3
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2018 Adam Scarr
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE. \ No newline at end of file
diff --git a/vendor/github.com/vektah/gqlparser/ast/argmap.go b/vendor/github.com/vektah/gqlparser/ast/argmap.go
new file mode 100644
index 00000000..43f6a3d6
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/argmap.go
@@ -0,0 +1,37 @@
+package ast
+
+func arg2map(defs ArgumentDefinitionList, args ArgumentList, vars map[string]interface{}) map[string]interface{} {
+ result := map[string]interface{}{}
+ var err error
+
+ for _, argDef := range defs {
+ var val interface{}
+ var hasValue bool
+
+ if argValue := args.ForName(argDef.Name); argValue != nil {
+ if argValue.Value.Kind == Variable {
+ val, hasValue = vars[argValue.Value.Raw]
+ } else {
+ val, err = argValue.Value.Value(vars)
+ if err != nil {
+ panic(err)
+ }
+ hasValue = true
+ }
+ }
+
+ if !hasValue && argDef.DefaultValue != nil {
+ val, err = argDef.DefaultValue.Value(vars)
+ if err != nil {
+ panic(err)
+ }
+ hasValue = true
+ }
+
+ if hasValue {
+ result[argDef.Name] = val
+ }
+ }
+
+ return result
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/collections.go b/vendor/github.com/vektah/gqlparser/ast/collections.go
new file mode 100644
index 00000000..6bf67297
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/collections.go
@@ -0,0 +1,138 @@
+package ast
+
+type FieldList []*FieldDefinition
+
+func (l FieldList) ForName(name string) *FieldDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type EnumValueList []*EnumValueDefinition
+
+func (l EnumValueList) ForName(name string) *EnumValueDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type DirectiveList []*Directive
+
+func (l DirectiveList) ForName(name string) *Directive {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type OperationList []*OperationDefinition
+
+func (l OperationList) ForName(name string) *OperationDefinition {
+ if name == "" && len(l) == 1 {
+ return l[0]
+ }
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type FragmentDefinitionList []*FragmentDefinition
+
+func (l FragmentDefinitionList) ForName(name string) *FragmentDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type VariableDefinitionList []*VariableDefinition
+
+func (l VariableDefinitionList) ForName(name string) *VariableDefinition {
+ for _, it := range l {
+ if it.Variable == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type ArgumentList []*Argument
+
+func (l ArgumentList) ForName(name string) *Argument {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type ArgumentDefinitionList []*ArgumentDefinition
+
+func (l ArgumentDefinitionList) ForName(name string) *ArgumentDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type SchemaDefinitionList []*SchemaDefinition
+
+type DirectiveDefinitionList []*DirectiveDefinition
+
+func (l DirectiveDefinitionList) ForName(name string) *DirectiveDefinition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type DefinitionList []*Definition
+
+func (l DefinitionList) ForName(name string) *Definition {
+ for _, it := range l {
+ if it.Name == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type OperationTypeDefinitionList []*OperationTypeDefinition
+
+func (l OperationTypeDefinitionList) ForType(name string) *OperationTypeDefinition {
+ for _, it := range l {
+ if it.Type == name {
+ return it
+ }
+ }
+ return nil
+}
+
+type ChildValueList []*ChildValue
+
+func (v ChildValueList) ForName(name string) *Value {
+ for _, f := range v {
+ if f.Name == name {
+ return f.Value
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/definition.go b/vendor/github.com/vektah/gqlparser/ast/definition.go
new file mode 100644
index 00000000..74f4ece5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/definition.go
@@ -0,0 +1,92 @@
+package ast
+
+type DefinitionKind string
+
+const (
+ Scalar DefinitionKind = "SCALAR"
+ Object DefinitionKind = "OBJECT"
+ Interface DefinitionKind = "INTERFACE"
+ Union DefinitionKind = "UNION"
+ Enum DefinitionKind = "ENUM"
+ InputObject DefinitionKind = "INPUT_OBJECT"
+)
+
+// Definition is the core type definition object; it includes all of the definable types
+// but does *not* cover schema or directives.
+//
+// @vektah: Javascript implementation has different types for all of these, but they are
+// more similar than different and don't define any behaviour. I think this style of
+// "some hot" struct works better, at least for go.
+//
+// Type extensions are also represented by this same struct.
+type Definition struct {
+ Kind DefinitionKind
+ Description string
+ Name string
+ Directives DirectiveList
+ Interfaces []string // object and input object
+ Fields FieldList // object and input object
+ Types []string // union
+ EnumValues EnumValueList // enum
+
+ Position *Position `dump:"-"`
+}
+
+func (d *Definition) IsLeafType() bool {
+ return d.Kind == Enum || d.Kind == Scalar
+}
+
+func (d *Definition) IsAbstractType() bool {
+ return d.Kind == Interface || d.Kind == Union
+}
+
+func (d *Definition) IsCompositeType() bool {
+ return d.Kind == Object || d.Kind == Interface || d.Kind == Union
+}
+
+func (d *Definition) IsInputType() bool {
+ return d.Kind == Scalar || d.Kind == Enum || d.Kind == InputObject
+}
+
+func (d *Definition) OneOf(types ...string) bool {
+ for _, t := range types {
+ if d.Name == t {
+ return true
+ }
+ }
+ return false
+}
+
+type FieldDefinition struct {
+ Description string
+ Name string
+ Arguments ArgumentDefinitionList // only for objects
+ DefaultValue *Value // only for input objects
+ Type *Type
+ Directives DirectiveList
+ Position *Position `dump:"-"`
+}
+
+type ArgumentDefinition struct {
+ Description string
+ Name string
+ DefaultValue *Value
+ Type *Type
+ Directives DirectiveList
+ Position *Position `dump:"-"`
+}
+
+type EnumValueDefinition struct {
+ Description string
+ Name string
+ Directives DirectiveList
+ Position *Position `dump:"-"`
+}
+
+type DirectiveDefinition struct {
+ Description string
+ Name string
+ Arguments ArgumentDefinitionList
+ Locations []DirectiveLocation
+ Position *Position `dump:"-"`
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/directive.go b/vendor/github.com/vektah/gqlparser/ast/directive.go
new file mode 100644
index 00000000..9b07c92a
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/directive.go
@@ -0,0 +1,42 @@
+package ast
+
+type DirectiveLocation string
+
+const (
+ // Executable
+ LocationQuery DirectiveLocation = `QUERY`
+ LocationMutation DirectiveLocation = `MUTATION`
+ LocationSubscription DirectiveLocation = `SUBSCRIPTION`
+ LocationField DirectiveLocation = `FIELD`
+ LocationFragmentDefinition DirectiveLocation = `FRAGMENT_DEFINITION`
+ LocationFragmentSpread DirectiveLocation = `FRAGMENT_SPREAD`
+ LocationInlineFragment DirectiveLocation = `INLINE_FRAGMENT`
+
+ // Type System
+ LocationSchema DirectiveLocation = `SCHEMA`
+ LocationScalar DirectiveLocation = `SCALAR`
+ LocationObject DirectiveLocation = `OBJECT`
+ LocationFieldDefinition DirectiveLocation = `FIELD_DEFINITION`
+ LocationArgumentDefinition DirectiveLocation = `ARGUMENT_DEFINITION`
+ LocationInterface DirectiveLocation = `INTERFACE`
+ LocationUnion DirectiveLocation = `UNION`
+ LocationEnum DirectiveLocation = `ENUM`
+ LocationEnumValue DirectiveLocation = `ENUM_VALUE`
+ LocationInputObject DirectiveLocation = `INPUT_OBJECT`
+ LocationInputFieldDefinition DirectiveLocation = `INPUT_FIELD_DEFINITION`
+)
+
+type Directive struct {
+ Name string
+ Arguments ArgumentList
+ Position *Position `dump:"-"`
+
+ // Requires validation
+ ParentDefinition *Definition
+ Definition *DirectiveDefinition
+ Location DirectiveLocation
+}
+
+func (d *Directive) ArgumentMap(vars map[string]interface{}) map[string]interface{} {
+ return arg2map(d.Definition.Arguments, d.Arguments, vars)
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/document.go b/vendor/github.com/vektah/gqlparser/ast/document.go
new file mode 100644
index 00000000..b7657d62
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/document.go
@@ -0,0 +1,65 @@
+package ast
+
+type QueryDocument struct {
+ Operations OperationList
+ Fragments FragmentDefinitionList
+ Position *Position `dump:"-"`
+}
+
+type SchemaDocument struct {
+ Schema SchemaDefinitionList
+ SchemaExtension SchemaDefinitionList
+ Directives DirectiveDefinitionList
+ Definitions DefinitionList
+ Extensions DefinitionList
+ Position *Position `dump:"-"`
+}
+
+func (d *SchemaDocument) Merge(other *SchemaDocument) {
+ d.Schema = append(d.Schema, other.Schema...)
+ d.SchemaExtension = append(d.SchemaExtension, other.SchemaExtension...)
+ d.Directives = append(d.Directives, other.Directives...)
+ d.Definitions = append(d.Definitions, other.Definitions...)
+ d.Extensions = append(d.Extensions, other.Extensions...)
+}
+
+type Schema struct {
+ Query *Definition
+ Mutation *Definition
+ Subscription *Definition
+
+ Types map[string]*Definition
+ Directives map[string]*DirectiveDefinition
+
+ PossibleTypes map[string][]*Definition
+}
+
+func (s *Schema) AddPossibleType(name string, def *Definition) {
+ s.PossibleTypes[name] = append(s.PossibleTypes[name], def)
+}
+
+// GetPossibleTypes will enumerate all the definitions for a given interface or union
+func (s *Schema) GetPossibleTypes(def *Definition) []*Definition {
+ if def.Kind == Union {
+ var defs []*Definition
+ for _, t := range def.Types {
+ defs = append(defs, s.Types[t])
+ }
+ return defs
+ }
+
+ return s.PossibleTypes[def.Name]
+}
+
+type SchemaDefinition struct {
+ Description string
+ Directives DirectiveList
+ OperationTypes OperationTypeDefinitionList
+ Position *Position `dump:"-"`
+}
+
+type OperationTypeDefinition struct {
+ Operation Operation
+ Type string
+ Position *Position `dump:"-"`
+}
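For illustration, a minimal sketch (not part of the package) of how GetPossibleTypes behaves for a hand-built union; for interfaces it would instead consult the PossibleTypes map filled via AddPossibleType during schema loading:

    package main

    import (
        "fmt"

        "github.com/vektah/gqlparser/ast"
    )

    func main() {
        dog := &ast.Definition{Kind: ast.Object, Name: "Dog"}
        cat := &ast.Definition{Kind: ast.Object, Name: "Cat"}
        pet := &ast.Definition{Kind: ast.Union, Name: "Pet", Types: []string{"Dog", "Cat"}}

        s := &ast.Schema{
            Types:         map[string]*ast.Definition{"Dog": dog, "Cat": cat, "Pet": pet},
            PossibleTypes: map[string][]*ast.Definition{},
        }

        // Unions are resolved directly from their member type names; interfaces
        // rely on AddPossibleType having been called while the schema was loaded.
        for _, d := range s.GetPossibleTypes(pet) {
            fmt.Println(d.Name) // Dog, then Cat
        }
    }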
diff --git a/vendor/github.com/vektah/gqlparser/ast/dumper.go b/vendor/github.com/vektah/gqlparser/ast/dumper.go
new file mode 100644
index 00000000..dbb7a7ef
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/dumper.go
@@ -0,0 +1,159 @@
+package ast
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+)
+
+// Dump turns ast into a stable string format for assertions in tests
+func Dump(i interface{}) string {
+ v := reflect.ValueOf(i)
+
+ d := dumper{Buffer: &bytes.Buffer{}}
+ d.dump(v)
+
+ return d.String()
+}
+
+type dumper struct {
+ *bytes.Buffer
+ indent int
+}
+
+type Dumpable interface {
+ Dump() string
+}
+
+func (d *dumper) dump(v reflect.Value) {
+ if dumpable, isDumpable := v.Interface().(Dumpable); isDumpable {
+ d.WriteString(dumpable.Dump())
+ return
+ }
+ switch v.Kind() {
+ case reflect.Bool:
+ if v.Bool() {
+ d.WriteString("true")
+ } else {
+ d.WriteString("false")
+ }
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ d.WriteString(fmt.Sprintf("%d", v.Int()))
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ d.WriteString(fmt.Sprintf("%d", v.Uint()))
+
+ case reflect.Float32, reflect.Float64:
+ d.WriteString(fmt.Sprintf("%.2f", v.Float()))
+
+ case reflect.String:
+ if v.Type().Name() != "string" {
+ d.WriteString(v.Type().Name() + "(" + strconv.Quote(v.String()) + ")")
+ } else {
+ d.WriteString(strconv.Quote(v.String()))
+ }
+
+ case reflect.Array, reflect.Slice:
+ d.dumpArray(v)
+
+ case reflect.Interface, reflect.Ptr:
+ d.dumpPtr(v)
+
+ case reflect.Struct:
+ d.dumpStruct(v)
+
+ default:
+ panic(fmt.Errorf("unsupported kind: %s\n buf: %s", v.Kind().String(), d.String()))
+ }
+}
+
+func (d *dumper) writeIndent() {
+ d.Buffer.WriteString(strings.Repeat(" ", d.indent))
+}
+
+func (d *dumper) nl() {
+ d.Buffer.WriteByte('\n')
+ d.writeIndent()
+}
+
+func typeName(t reflect.Type) string {
+ if t.Kind() == reflect.Ptr {
+ return typeName(t.Elem())
+ }
+ return t.Name()
+}
+
+func (d *dumper) dumpArray(v reflect.Value) {
+ d.WriteString("[" + typeName(v.Type().Elem()) + "]")
+
+ for i := 0; i < v.Len(); i++ {
+ d.nl()
+ d.WriteString("- ")
+ d.indent++
+ d.dump(v.Index(i))
+ d.indent--
+ }
+}
+
+func (d *dumper) dumpStruct(v reflect.Value) {
+ d.WriteString("<" + v.Type().Name() + ">")
+ d.indent++
+
+ typ := v.Type()
+ for i := 0; i < v.NumField(); i++ {
+ f := v.Field(i)
+ if typ.Field(i).Tag.Get("dump") == "-" {
+ continue
+ }
+
+ if isZero(f) {
+ continue
+ }
+ d.nl()
+ d.WriteString(typ.Field(i).Name)
+ d.WriteString(": ")
+ d.dump(v.Field(i))
+ }
+
+ d.indent--
+}
+
+func isZero(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.Ptr, reflect.Interface:
+ return v.IsNil()
+ case reflect.Func, reflect.Map:
+ return v.IsNil()
+
+ case reflect.Array, reflect.Slice:
+ if v.IsNil() {
+ return true
+ }
+ z := true
+ for i := 0; i < v.Len(); i++ {
+ z = z && isZero(v.Index(i))
+ }
+ return z
+ case reflect.Struct:
+ z := true
+ for i := 0; i < v.NumField(); i++ {
+ z = z && isZero(v.Field(i))
+ }
+ return z
+ case reflect.String:
+ return v.String() == ""
+ }
+
+ // Compare other types directly:
+ return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()))
+}
+
+func (d *dumper) dumpPtr(v reflect.Value) {
+ if v.IsNil() {
+ d.WriteString("nil")
+ return
+ }
+ d.dump(v.Elem())
+}
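As a rough illustration (assuming the usual import path), dumping a hand-built Field skips zero-value fields and fields tagged dump:"-", and prints one indented Field: value line per populated field:

    package main

    import (
        "fmt"

        "github.com/vektah/gqlparser/ast"
    )

    func main() {
        f := &ast.Field{Alias: "greeting", Name: "hello"}

        // Zero-value fields and dump:"-" tagged fields (such as Position) are omitted,
        // so this prints something like:
        //   <Field>
        //     Alias: "greeting"
        //     Name: "hello"
        fmt.Println(ast.Dump(f))
    }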
diff --git a/vendor/github.com/vektah/gqlparser/ast/fragment.go b/vendor/github.com/vektah/gqlparser/ast/fragment.go
new file mode 100644
index 00000000..57ab56c7
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/fragment.go
@@ -0,0 +1,38 @@
+package ast
+
+type FragmentSpread struct {
+ Name string
+ Directives DirectiveList
+
+ // Requires validation
+ ObjectDefinition *Definition
+ Definition *FragmentDefinition
+
+ Position *Position `dump:"-"`
+}
+
+type InlineFragment struct {
+ TypeCondition string
+ Directives DirectiveList
+ SelectionSet SelectionSet
+
+ // Requires validation
+ ObjectDefinition *Definition
+
+ Position *Position `dump:"-"`
+}
+
+type FragmentDefinition struct {
+ Name string
+ // Note: fragment variable definitions are experimental and may be changed
+ // or removed in the future.
+ VariableDefinition VariableDefinitionList
+ TypeCondition string
+ Directives DirectiveList
+ SelectionSet SelectionSet
+
+ // Requires validation
+ Definition *Definition
+
+ Position *Position `dump:"-"`
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/operation.go b/vendor/github.com/vektah/gqlparser/ast/operation.go
new file mode 100644
index 00000000..03e916a0
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/operation.go
@@ -0,0 +1,29 @@
+package ast
+
+type Operation string
+
+const (
+ Query Operation = "query"
+ Mutation Operation = "mutation"
+ Subscription Operation = "subscription"
+)
+
+type OperationDefinition struct {
+ Operation Operation
+ Name string
+ VariableDefinitions VariableDefinitionList
+ Directives DirectiveList
+ SelectionSet SelectionSet
+ Position *Position `dump:"-"`
+}
+
+type VariableDefinition struct {
+ Variable string
+ Type *Type
+ DefaultValue *Value
+ Position *Position `dump:"-"`
+
+ // Requires validation
+ Definition *Definition
+ Used bool `dump:"-"`
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/selection.go b/vendor/github.com/vektah/gqlparser/ast/selection.go
new file mode 100644
index 00000000..159db844
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/selection.go
@@ -0,0 +1,39 @@
+package ast
+
+type SelectionSet []Selection
+
+type Selection interface {
+ isSelection()
+ GetPosition() *Position
+}
+
+func (*Field) isSelection() {}
+func (*FragmentSpread) isSelection() {}
+func (*InlineFragment) isSelection() {}
+
+func (s *Field) GetPosition() *Position { return s.Position }
+func (s *FragmentSpread) GetPosition() *Position { return s.Position }
+func (s *InlineFragment) GetPosition() *Position { return s.Position }
+
+type Field struct {
+ Alias string
+ Name string
+ Arguments ArgumentList
+ Directives DirectiveList
+ SelectionSet SelectionSet
+ Position *Position `dump:"-"`
+
+ // Requires validation
+ Definition *FieldDefinition
+ ObjectDefinition *Definition
+}
+
+type Argument struct {
+ Name string
+ Value *Value
+ Position *Position `dump:"-"`
+}
+
+func (f *Field) ArgumentMap(vars map[string]interface{}) map[string]interface{} {
+ return arg2map(f.Definition.Arguments, f.Arguments, vars)
+}
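ArgumentMap leans on arg2map from argmap.go above: an explicit argument (or the variable it names) wins, otherwise the declared default applies, otherwise the argument is omitted. A minimal sketch with the Definition wired up by hand (normally the validator fills it in):

    package main

    import (
        "fmt"

        "github.com/vektah/gqlparser/ast"
    )

    func main() {
        f := &ast.Field{
            Name: "hello",
            Arguments: ast.ArgumentList{
                {Name: "name", Value: &ast.Value{Kind: ast.StringValue, Raw: "world"}},
            },
            // Normally populated during validation; set by hand here for the sketch.
            Definition: &ast.FieldDefinition{
                Name: "hello",
                Arguments: ast.ArgumentDefinitionList{
                    {Name: "name", Type: ast.NamedType("String", nil)},
                },
            },
        }

        fmt.Println(f.ArgumentMap(nil)) // map[name:world]
    }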
diff --git a/vendor/github.com/vektah/gqlparser/ast/source.go b/vendor/github.com/vektah/gqlparser/ast/source.go
new file mode 100644
index 00000000..9d44dd9c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/source.go
@@ -0,0 +1,14 @@
+package ast
+
+type Source struct {
+ Name string
+ Input string
+}
+
+type Position struct {
+ Start int // The starting position, in runes, of this token in the input.
+ End int // The end position, in runes, of this token in the input.
+ Line int // The line number at the start of this item.
+ Column int // The column number at the start of this item.
+ Src *Source // The source document this token belongs to
+}
diff --git a/vendor/github.com/vektah/gqlparser/ast/type.go b/vendor/github.com/vektah/gqlparser/ast/type.go
new file mode 100644
index 00000000..9577fdb4
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/type.go
@@ -0,0 +1,68 @@
+package ast
+
+func NonNullNamedType(named string, pos *Position) *Type {
+ return &Type{NamedType: named, NonNull: true, Position: pos}
+}
+
+func NamedType(named string, pos *Position) *Type {
+ return &Type{NamedType: named, NonNull: false, Position: pos}
+}
+
+func NonNullListType(elem *Type, pos *Position) *Type {
+ return &Type{Elem: elem, NonNull: true, Position: pos}
+}
+
+func ListType(elem *Type, pos *Position) *Type {
+ return &Type{Elem: elem, NonNull: false, Position: pos}
+}
+
+type Type struct {
+ NamedType string
+ Elem *Type
+ NonNull bool
+ Position *Position `dump:"-"`
+}
+
+func (t *Type) Name() string {
+ if t.NamedType != "" {
+ return t.NamedType
+ }
+
+ return t.Elem.Name()
+}
+
+func (t *Type) String() string {
+ nn := ""
+ if t.NonNull {
+ nn = "!"
+ }
+ if t.NamedType != "" {
+ return t.NamedType + nn
+ }
+
+ return "[" + t.Elem.String() + "]" + nn
+}
+
+func (t *Type) IsCompatible(other *Type) bool {
+ if t.NamedType != other.NamedType {
+ return false
+ }
+
+ if t.Elem != nil && other.Elem == nil {
+ return false
+ }
+
+ if t.Elem != nil && !t.Elem.IsCompatible(other.Elem) {
+ return false
+ }
+
+ if other.NonNull {
+ return t.NonNull
+ }
+
+ return true
+}
+
+func (v *Type) Dump() string {
+ return v.String()
+}
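For example, the constructors above compose into wrapped types whose String() output matches GraphQL syntax, and IsCompatible treats non-null as usable where nullable is expected but not the reverse (a small sketch, positions omitted):

    package main

    import (
        "fmt"

        "github.com/vektah/gqlparser/ast"
    )

    func main() {
        // strict is [String!]!, a non-null list of non-null Strings.
        strict := ast.NonNullListType(ast.NonNullNamedType("String", nil), nil)
        loose := ast.ListType(ast.NamedType("String", nil), nil) // [String]

        fmt.Println(strict.String(), strict.Name()) // [String!]! String
        fmt.Println(strict.IsCompatible(loose))     // true: non-null satisfies nullable
        fmt.Println(loose.IsCompatible(strict))     // false: nullable can't satisfy non-null
    }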
diff --git a/vendor/github.com/vektah/gqlparser/ast/value.go b/vendor/github.com/vektah/gqlparser/ast/value.go
new file mode 100644
index 00000000..3168b266
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/ast/value.go
@@ -0,0 +1,120 @@
+package ast
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+type ValueKind int
+
+const (
+ Variable ValueKind = iota
+ IntValue
+ FloatValue
+ StringValue
+ BlockValue
+ BooleanValue
+ NullValue
+ EnumValue
+ ListValue
+ ObjectValue
+)
+
+type Value struct {
+ Raw string
+ Children ChildValueList
+ Kind ValueKind
+ Position *Position `dump:"-"`
+
+ // Requires validation
+ Definition *Definition
+ VariableDefinition *VariableDefinition
+ ExpectedType *Type
+}
+
+type ChildValue struct {
+ Name string
+ Value *Value
+ Position *Position `dump:"-"`
+}
+
+func (v *Value) Value(vars map[string]interface{}) (interface{}, error) {
+ if v == nil {
+ return nil, nil
+ }
+ switch v.Kind {
+ case Variable:
+ if value, ok := vars[v.Raw]; ok {
+ return value, nil
+ }
+ if v.VariableDefinition != nil && v.VariableDefinition.DefaultValue != nil {
+ return v.VariableDefinition.DefaultValue.Value(vars)
+ }
+ return nil, nil
+ case IntValue:
+ return strconv.ParseInt(v.Raw, 10, 64)
+ case FloatValue:
+ return strconv.ParseFloat(v.Raw, 64)
+ case StringValue, BlockValue, EnumValue:
+ return v.Raw, nil
+ case BooleanValue:
+ return strconv.ParseBool(v.Raw)
+ case NullValue:
+ return nil, nil
+ case ListValue:
+ var val []interface{}
+ for _, elem := range v.Children {
+ elemVal, err := elem.Value.Value(vars)
+ if err != nil {
+ return val, err
+ }
+ val = append(val, elemVal)
+ }
+ return val, nil
+ case ObjectValue:
+ val := map[string]interface{}{}
+ for _, elem := range v.Children {
+ elemVal, err := elem.Value.Value(vars)
+ if err != nil {
+ return val, err
+ }
+ val[elem.Name] = elemVal
+ }
+ return val, nil
+ default:
+ panic(fmt.Errorf("unknown value kind %d", v.Kind))
+ }
+}
+
+func (v *Value) String() string {
+ if v == nil {
+ return "<nil>"
+ }
+ switch v.Kind {
+ case Variable:
+ return "$" + v.Raw
+ case IntValue, FloatValue, EnumValue, BooleanValue, NullValue:
+ return v.Raw
+ case StringValue, BlockValue:
+ return strconv.Quote(v.Raw)
+ case ListValue:
+ var val []string
+ for _, elem := range v.Children {
+ val = append(val, elem.Value.String())
+ }
+ return "[" + strings.Join(val, ",") + "]"
+ case ObjectValue:
+ var val []string
+ for _, elem := range v.Children {
+ val = append(val, strconv.Quote(elem.Name)+":"+elem.Value.String())
+ }
+ return "{" + strings.Join(val, ",") + "}"
+ default:
+ panic(fmt.Errorf("unknown value kind %d", v.Kind))
+ }
+}
+
+func (v *Value) Dump() string {
+ return v.String()
+}
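For instance, a hand-built list literal of ints resolves to a slice of int64 values and renders back out through String() (a small sketch; positions and variable definitions omitted):

    package main

    import (
        "fmt"

        "github.com/vektah/gqlparser/ast"
    )

    func main() {
        v := &ast.Value{
            Kind: ast.ListValue,
            Children: ast.ChildValueList{
                {Value: &ast.Value{Kind: ast.IntValue, Raw: "1"}},
                {Value: &ast.Value{Kind: ast.IntValue, Raw: "2"}},
            },
        }

        val, err := v.Value(nil) // literals need no variables
        fmt.Println(val, err)    // [1 2] <nil> (elements are int64)
        fmt.Println(v.String())  // [1,2]
    }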
diff --git a/vendor/github.com/vektah/gqlparser/gqlerror/error.go b/vendor/github.com/vektah/gqlparser/gqlerror/error.go
new file mode 100644
index 00000000..c4c0847a
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/gqlerror/error.go
@@ -0,0 +1,133 @@
+package gqlerror
+
+import (
+ "bytes"
+ "fmt"
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors
+type Error struct {
+ Message string `json:"message"`
+ Path []interface{} `json:"path,omitempty"`
+ Locations []Location `json:"locations,omitempty"`
+ Extensions map[string]interface{} `json:"extensions,omitempty"`
+ Rule string `json:"-"`
+}
+
+func (err *Error) SetFile(file string) {
+ if file == "" {
+ return
+ }
+ if err.Extensions == nil {
+ err.Extensions = map[string]interface{}{}
+ }
+
+ err.Extensions["file"] = file
+}
+
+type Location struct {
+ Line int `json:"line,omitempty"`
+ Column int `json:"column,omitempty"`
+}
+
+type List []*Error
+
+func (err *Error) Error() string {
+ var res bytes.Buffer
+ if err == nil {
+ return ""
+ }
+ filename, _ := err.Extensions["file"].(string)
+ if filename == "" {
+ filename = "input"
+ }
+ res.WriteString(filename)
+
+ if len(err.Locations) > 0 {
+ res.WriteByte(':')
+ res.WriteString(strconv.Itoa(err.Locations[0].Line))
+ }
+
+ res.WriteString(": ")
+ if ps := err.pathString(); ps != "" {
+ res.WriteString(ps)
+ res.WriteByte(' ')
+ }
+
+ res.WriteString(err.Message)
+
+ return res.String()
+}
+
+func (err Error) pathString() string {
+ var str bytes.Buffer
+ for i, v := range err.Path {
+
+ switch v := v.(type) {
+ case int, int64:
+ str.WriteString(fmt.Sprintf("[%d]", v))
+ default:
+ if i != 0 {
+ str.WriteByte('.')
+ }
+ str.WriteString(fmt.Sprint(v))
+ }
+ }
+ return str.String()
+}
+
+func (errs List) Error() string {
+ var buf bytes.Buffer
+ for _, err := range errs {
+ buf.WriteString(err.Error())
+ buf.WriteByte('\n')
+ }
+ return buf.String()
+}
+
+func WrapPath(path []interface{}, err error) *Error {
+ return &Error{
+ Message: err.Error(),
+ Path: path,
+ }
+}
+
+func Errorf(message string, args ...interface{}) *Error {
+ return &Error{
+ Message: fmt.Sprintf(message, args...),
+ }
+}
+
+func ErrorPathf(path []interface{}, message string, args ...interface{}) *Error {
+ return &Error{
+ Message: fmt.Sprintf(message, args...),
+ Path: path,
+ }
+}
+
+func ErrorPosf(pos *ast.Position, message string, args ...interface{}) *Error {
+ return ErrorLocf(
+ pos.Src.Name,
+ pos.Line,
+ pos.Column,
+ message,
+ args...,
+ )
+}
+
+func ErrorLocf(file string, line int, col int, message string, args ...interface{}) *Error {
+ var extensions map[string]interface{}
+ if file != "" {
+ extensions = map[string]interface{}{"file": file}
+ }
+ return &Error{
+ Message: fmt.Sprintf(message, args...),
+ Extensions: extensions,
+ Locations: []Location{
+ {Line: line, Column: col},
+ },
+ }
+}
diff --git a/vendor/github.com/vektah/gqlparser/gqlparser.go b/vendor/github.com/vektah/gqlparser/gqlparser.go
new file mode 100644
index 00000000..71e46407
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/gqlparser.go
@@ -0,0 +1,42 @@
+package gqlparser
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/parser"
+ "github.com/vektah/gqlparser/validator"
+ _ "github.com/vektah/gqlparser/validator/rules"
+)
+
+func LoadSchema(str ...*ast.Source) (*ast.Schema, *gqlerror.Error) {
+ return validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...)
+}
+
+func MustLoadSchema(str ...*ast.Source) *ast.Schema {
+ s, err := validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...)
+ if err != nil {
+ panic(err)
+ }
+ return s
+}
+
+func LoadQuery(schema *ast.Schema, str string) (*ast.QueryDocument, gqlerror.List) {
+ query, err := parser.ParseQuery(&ast.Source{Input: str})
+ if err != nil {
+ return nil, gqlerror.List{err}
+ }
+ errs := validator.Validate(schema, query)
+ if errs != nil {
+ return nil, errs
+ }
+
+ return query, nil
+}
+
+func MustLoadQuery(schema *ast.Schema, str string) *ast.QueryDocument {
+ q, err := LoadQuery(schema, str)
+ if err != nil {
+ panic(err)
+ }
+ return q
+}
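A typical round trip through this entry point loads a schema (the prelude with built-in scalars and directives is prepended automatically) and then parses and validates a query against it; a minimal sketch:

    package main

    import (
        "fmt"

        "github.com/vektah/gqlparser"
        "github.com/vektah/gqlparser/ast"
    )

    func main() {
        schema := gqlparser.MustLoadSchema(&ast.Source{
            Name:  "schema.graphql",
            Input: "type Query { hello: String! }",
        })

        query, errs := gqlparser.LoadQuery(schema, "{ hello }")
        if errs != nil {
            panic(errs)
        }

        field := query.Operations[0].SelectionSet[0].(*ast.Field)
        fmt.Println(field.Name) // hello
    }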
diff --git a/vendor/github.com/vektah/gqlparser/lexer/blockstring.go b/vendor/github.com/vektah/gqlparser/lexer/blockstring.go
new file mode 100644
index 00000000..4065a610
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/lexer/blockstring.go
@@ -0,0 +1,58 @@
+package lexer
+
+import (
+ "math"
+ "strings"
+)
+
+// blockStringValue produces the value of a block string from its parsed raw value, similar to
+// CoffeeScript's block string, Python's docstring trim, or Ruby's strip_heredoc.
+//
+// This implements the GraphQL spec's BlockStringValue() static algorithm.
+func blockStringValue(raw string) string {
+ lines := strings.Split(raw, "\n")
+
+ commonIndent := math.MaxInt32
+ for _, line := range lines {
+ indent := leadingWhitespace(line)
+ if indent < len(line) && indent < commonIndent {
+ commonIndent = indent
+ if commonIndent == 0 {
+ break
+ }
+ }
+ }
+
+ if commonIndent != math.MaxInt32 && len(lines) > 0 {
+ for i := 1; i < len(lines); i++ {
+ if len(lines[i]) < commonIndent {
+ lines[i] = ""
+ } else {
+ lines[i] = lines[i][commonIndent:]
+ }
+ }
+ }
+
+ start := 0
+ end := len(lines)
+
+ for start < end && leadingWhitespace(lines[start]) == math.MaxInt32 {
+ start++
+ }
+
+ for start < end && leadingWhitespace(lines[end-1]) == math.MaxInt32 {
+ end--
+ }
+
+ return strings.Join(lines[start:end], "\n")
+}
+
+func leadingWhitespace(str string) int {
+ for i, r := range str {
+ if r != ' ' && r != '\t' {
+ return i
+ }
+ }
+ // this line is made up entirely of whitespace, so its leading whitespace doesn't count.
+ return math.MaxInt32
+}
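blockStringValue itself is unexported, but its effect is visible through the lexer (lexer.go below), which applies it to every BlockString token value; a small sketch, assuming the standard import paths:

    package main

    import (
        "fmt"

        "github.com/vektah/gqlparser/ast"
        "github.com/vektah/gqlparser/lexer"
    )

    func main() {
        raw := "\"\"\"\n    Hello,\n      World!\n\n    Yours,\n      GraphQL.\n\"\"\""

        lex := lexer.New(&ast.Source{Name: "doc.graphql", Input: raw})
        tok, err := lex.ReadToken() // a single BlockString token
        if err != nil {
            panic(err)
        }

        // Common indentation and blank leading/trailing lines are stripped:
        fmt.Printf("%q\n", tok.Value) // "Hello,\n  World!\n\nYours,\n  GraphQL."
    }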
diff --git a/vendor/github.com/vektah/gqlparser/lexer/lexer.go b/vendor/github.com/vektah/gqlparser/lexer/lexer.go
new file mode 100644
index 00000000..3aaa7102
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/lexer/lexer.go
@@ -0,0 +1,510 @@
+package lexer
+
+import (
+ "bytes"
+ "unicode/utf8"
+
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+// Lexer turns graphql request and schema strings into tokens
+type Lexer struct {
+ *ast.Source
+ // An offset into the string in bytes
+ start int
+ // An offset into the string in runes
+ startRunes int
+ // An offset into the string in bytes
+ end int
+ // An offset into the string in runes
+ endRunes int
+ // the current line number
+ line int
+ // An offset into the string in runes, at the start of the current line
+ lineStartRunes int
+}
+
+func New(src *ast.Source) Lexer {
+ return Lexer{
+ Source: src,
+ line: 1,
+ }
+}
+
+// take one rune from input and advance end
+func (s *Lexer) peek() (rune, int) {
+ return utf8.DecodeRuneInString(s.Input[s.end:])
+}
+
+func (s *Lexer) makeToken(kind Type) (Token, *gqlerror.Error) {
+ return s.makeValueToken(kind, s.Input[s.start:s.end])
+}
+
+func (s *Lexer) makeValueToken(kind Type, value string) (Token, *gqlerror.Error) {
+ return Token{
+ Kind: kind,
+ Value: value,
+ Pos: ast.Position{
+ Start: s.startRunes,
+ End: s.endRunes,
+ Line: s.line,
+ Column: s.startRunes - s.lineStartRunes + 1,
+ Src: s.Source,
+ },
+ }, nil
+}
+
+func (s *Lexer) makeError(format string, args ...interface{}) (Token, *gqlerror.Error) {
+ column := s.endRunes - s.lineStartRunes + 1
+ return Token{
+ Kind: Invalid,
+ Pos: ast.Position{
+ Start: s.startRunes,
+ End: s.endRunes,
+ Line: s.line,
+ Column: column,
+ Src: s.Source,
+ },
+ }, gqlerror.ErrorLocf(s.Source.Name, s.line, column, format, args...)
+}
+
+// ReadToken gets the next token from the source, starting at the lexer's current position.
+//
+// This skips over whitespace and comments until it finds the next lexable
+// token, then lexes punctuators immediately or calls the appropriate helper
+// function for more complicated tokens.
+func (s *Lexer) ReadToken() (token Token, err *gqlerror.Error) {
+
+ s.ws()
+ s.start = s.end
+ s.startRunes = s.endRunes
+
+ if s.end >= len(s.Input) {
+ return s.makeToken(EOF)
+ }
+ r := s.Input[s.start]
+ s.end++
+ s.endRunes++
+ switch r {
+ case '!':
+ return s.makeValueToken(Bang, "")
+
+ case '$':
+ return s.makeValueToken(Dollar, "")
+ case '&':
+ return s.makeValueToken(Amp, "")
+ case '(':
+ return s.makeValueToken(ParenL, "")
+ case ')':
+ return s.makeValueToken(ParenR, "")
+ case '.':
+ if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == "..." {
+ s.end += 2
+ s.endRunes += 2
+ return s.makeValueToken(Spread, "")
+ }
+ case ':':
+ return s.makeValueToken(Colon, "")
+ case '=':
+ return s.makeValueToken(Equals, "")
+ case '@':
+ return s.makeValueToken(At, "")
+ case '[':
+ return s.makeValueToken(BracketL, "")
+ case ']':
+ return s.makeValueToken(BracketR, "")
+ case '{':
+ return s.makeValueToken(BraceL, "")
+ case '}':
+ return s.makeValueToken(BraceR, "")
+ case '|':
+ return s.makeValueToken(Pipe, "")
+ case '#':
+ s.readComment()
+ return s.ReadToken()
+
+ case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z':
+ return s.readName()
+
+ case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ return s.readNumber()
+
+ case '"':
+ if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == `"""` {
+ return s.readBlockString()
+ }
+
+ return s.readString()
+ }
+
+ s.end--
+ s.endRunes--
+
+ if r < 0x0020 && r != 0x0009 && r != 0x000a && r != 0x000d {
+ return s.makeError(`Cannot contain the invalid character "\u%04d"`, r)
+ }
+
+ if r == '\'' {
+ return s.makeError(`Unexpected single quote character ('), did you mean to use a double quote (")?`)
+ }
+
+ return s.makeError(`Cannot parse the unexpected character "%s".`, string(r))
+}
+
+// ws reads from the input starting at the current position until it finds a non-whitespace
+// character, advancing the token end past all whitespace, commas and line terminators
+func (s *Lexer) ws() {
+ for s.end < len(s.Input) {
+ switch s.Input[s.end] {
+ case '\t', ' ', ',':
+ s.end++
+ s.endRunes++
+ case '\n':
+ s.end++
+ s.endRunes++
+ s.line++
+ s.lineStartRunes = s.endRunes
+ case '\r':
+ s.end++
+ s.endRunes++
+ s.line++
+ s.lineStartRunes = s.endRunes
+ // skip the following newline if it's there
+ if s.end < len(s.Input) && s.Input[s.end] == '\n' {
+ s.end++
+ s.endRunes++
+ }
+ // byte order mark; given ws is a hot path, we aren't relying on the unicode package here.
+ case 0xef:
+ if s.end+2 < len(s.Input) && s.Input[s.end+1] == 0xBB && s.Input[s.end+2] == 0xBF {
+ s.end += 3
+ s.endRunes++
+ } else {
+ return
+ }
+ default:
+ return
+ }
+ }
+}
+
+// readComment from the input
+//
+// #[\u0009\u0020-\uFFFF]*
+func (s *Lexer) readComment() (Token, *gqlerror.Error) {
+ for s.end < len(s.Input) {
+ r, w := s.peek()
+
+ // SourceCharacter but not LineTerminator
+ if r > 0x001f || r == '\t' {
+ s.end += w
+ s.endRunes++
+ } else {
+ break
+ }
+ }
+
+ return s.makeToken(Comment)
+}
+
+// readNumber from the input, either a float
+// or an int depending on whether a decimal point appears.
+//
+// Int: -?(0|[1-9][0-9]*)
+// Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
+func (s *Lexer) readNumber() (Token, *gqlerror.Error) {
+ float := false
+
+ // backup to the first digit
+ s.end--
+ s.endRunes--
+
+ s.acceptByte('-')
+
+ if s.acceptByte('0') {
+ if consumed := s.acceptDigits(); consumed != 0 {
+ s.end -= consumed
+ s.endRunes -= consumed
+ return s.makeError("Invalid number, unexpected digit after 0: %s.", s.describeNext())
+ }
+ } else {
+ if consumed := s.acceptDigits(); consumed == 0 {
+ return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+ }
+ }
+
+ if s.acceptByte('.') {
+ float = true
+
+ if consumed := s.acceptDigits(); consumed == 0 {
+ return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+ }
+ }
+
+ if s.acceptByte('e', 'E') {
+ float = true
+
+ s.acceptByte('-', '+')
+
+ if consumed := s.acceptDigits(); consumed == 0 {
+ return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+ }
+ }
+
+ if float {
+ return s.makeToken(Float)
+ } else {
+ return s.makeToken(Int)
+ }
+}
+
+// acceptByte if it matches any of given bytes, returning true if it found anything
+func (s *Lexer) acceptByte(bytes ...uint8) bool {
+ if s.end >= len(s.Input) {
+ return false
+ }
+
+ for _, accepted := range bytes {
+ if s.Input[s.end] == accepted {
+ s.end++
+ s.endRunes++
+ return true
+ }
+ }
+ return false
+}
+
+// acceptDigits from the input, returning the number of digits it found
+func (s *Lexer) acceptDigits() int {
+ consumed := 0
+ for s.end < len(s.Input) && s.Input[s.end] >= '0' && s.Input[s.end] <= '9' {
+ s.end++
+ s.endRunes++
+ consumed++
+ }
+
+ return consumed
+}
+
+// describeNext peeks at the input and returns a human-readable string. This will allocate,
+// so it should only be used when building errors
+func (s *Lexer) describeNext() string {
+ if s.end < len(s.Input) {
+ return `"` + string(s.Input[s.end]) + `"`
+ }
+ return "<EOF>"
+}
+
+// readString from the input
+//
+// "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
+func (s *Lexer) readString() (Token, *gqlerror.Error) {
+ inputLen := len(s.Input)
+
+ // this buffer is lazily created only if there are escape characters.
+ var buf *bytes.Buffer
+
+ // skip the opening quote
+ s.start++
+ s.startRunes++
+
+ for s.end < inputLen {
+ r := s.Input[s.end]
+ if r == '\n' || r == '\r' {
+ break
+ }
+ if r < 0x0020 && r != '\t' {
+ return s.makeError(`Invalid character within String: "\u%04d".`, r)
+ }
+ switch r {
+ default:
+ var char = rune(r)
+ var w = 1
+
+ // skip unicode overhead if we are in the ascii range
+ if r >= 127 {
+ char, w = utf8.DecodeRuneInString(s.Input[s.end:])
+ }
+ s.end += w
+ s.endRunes++
+
+ if buf != nil {
+ buf.WriteRune(char)
+ }
+
+ case '"':
+ t, err := s.makeToken(String)
+ // the token should not include the quotes in its value, but should cover them in its position
+ t.Pos.Start--
+ t.Pos.End++
+
+ if buf != nil {
+ t.Value = buf.String()
+ }
+
+ // skip the close quote
+ s.end++
+ s.endRunes++
+
+ return t, err
+
+ case '\\':
+ if s.end+1 >= inputLen {
+ s.end++
+ s.endRunes++
+ return s.makeError(`Invalid character escape sequence.`)
+ }
+
+ if buf == nil {
+ buf = bytes.NewBufferString(s.Input[s.start:s.end])
+ }
+
+ escape := s.Input[s.end+1]
+
+ if escape == 'u' {
+ if s.end+6 >= inputLen {
+ s.end++
+ s.endRunes++
+ return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:])
+ }
+
+ r, ok := unhex(s.Input[s.end+2 : s.end+6])
+ if !ok {
+ s.end++
+ s.endRunes++
+ return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:s.end+5])
+ }
+ buf.WriteRune(r)
+ s.end += 6
+ s.endRunes += 6
+ } else {
+ switch escape {
+ case '"', '/', '\\':
+ buf.WriteByte(escape)
+ case 'b':
+ buf.WriteByte('\b')
+ case 'f':
+ buf.WriteByte('\f')
+ case 'n':
+ buf.WriteByte('\n')
+ case 'r':
+ buf.WriteByte('\r')
+ case 't':
+ buf.WriteByte('\t')
+ default:
+ s.end += 1
+ s.endRunes += 1
+ return s.makeError("Invalid character escape sequence: \\%s.", string(escape))
+ }
+ s.end += 2
+ s.endRunes += 2
+ }
+ }
+ }
+
+ return s.makeError("Unterminated string.")
+}
+
+// readBlockString from the input
+//
+// """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
+func (s *Lexer) readBlockString() (Token, *gqlerror.Error) {
+ inputLen := len(s.Input)
+
+ var buf bytes.Buffer
+
+ // skip the opening quote
+ s.start += 3
+ s.startRunes += 3
+ s.end += 2
+ s.endRunes += 2
+
+ for s.end < inputLen {
+ r := s.Input[s.end]
+
+ // Closing triple quote (""")
+ if r == '"' && s.end+3 <= inputLen && s.Input[s.end:s.end+3] == `"""` {
+ t, err := s.makeValueToken(BlockString, blockStringValue(buf.String()))
+
+ // the token should not include the quotes in its value, but should cover them in its position
+ t.Pos.Start -= 3
+ t.Pos.End += 3
+
+ // skip the close quote
+ s.end += 3
+ s.endRunes += 3
+
+ return t, err
+ }
+
+ // SourceCharacter
+ if r < 0x0020 && r != '\t' && r != '\n' && r != '\r' {
+ return s.makeError(`Invalid character within String: "\u%04d".`, r)
+ }
+
+ if r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""` {
+ buf.WriteString(`"""`)
+ s.end += 4
+ s.endRunes += 4
+ } else if r == '\r' {
+ if s.end+1 <= inputLen && s.Input[s.end+1] == '\n' {
+ s.end++
+ s.endRunes++
+ }
+
+ buf.WriteByte('\n')
+ s.end++
+ s.endRunes++
+ } else {
+ var char = rune(r)
+ var w = 1
+
+ // skip unicode overhead if we are in the ascii range
+ if r >= 127 {
+ char, w = utf8.DecodeRuneInString(s.Input[s.end:])
+ }
+ s.end += w
+ s.endRunes++
+ buf.WriteRune(char)
+ }
+ }
+
+ return s.makeError("Unterminated string.")
+}
+
+func unhex(b string) (v rune, ok bool) {
+ for _, c := range b {
+ v <<= 4
+ switch {
+ case '0' <= c && c <= '9':
+ v |= c - '0'
+ case 'a' <= c && c <= 'f':
+ v |= c - 'a' + 10
+ case 'A' <= c && c <= 'F':
+ v |= c - 'A' + 10
+ default:
+ return 0, false
+ }
+ }
+
+ return v, true
+}
+
+// readName from the input
+//
+// [_A-Za-z][_0-9A-Za-z]*
+func (s *Lexer) readName() (Token, *gqlerror.Error) {
+ for s.end < len(s.Input) {
+ r, w := s.peek()
+
+ if (r >= '0' && r <= '9') || (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' {
+ s.end += w
+ s.endRunes++
+ } else {
+ break
+ }
+ }
+
+ return s.makeToken(Name)
+}
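Tokens are pulled one at a time until EOF, each carrying its value and rune-based position; a minimal sketch of driving the lexer directly:

    package main

    import (
        "fmt"

        "github.com/vektah/gqlparser/ast"
        "github.com/vektah/gqlparser/lexer"
    )

    func main() {
        lex := lexer.New(&ast.Source{
            Name:  "query.graphql",
            Input: `{ hello(name: "world") }`,
        })

        for {
            tok, err := lex.ReadToken()
            if err != nil {
                panic(err)
            }
            if tok.Kind == lexer.EOF {
                break
            }
            // Punctuators carry an empty Value; names and strings carry their text.
            fmt.Printf("%q at line %d, column %d\n", tok.Value, tok.Pos.Line, tok.Pos.Column)
        }
    }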
diff --git a/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml b/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml
new file mode 100644
index 00000000..e2c26696
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/lexer/lexer_test.yml
@@ -0,0 +1,672 @@
+encoding:
+ - name: disallows uncommon control characters
+ input: "\u0007"
+ error:
+ message: 'Cannot contain the invalid character "\u0007"'
+ locations: [{line: 1, column: 1}]
+
+ - name: accepts BOM header
+ input: "\uFEFF foo"
+ tokens:
+ -
+ kind: NAME
+ start: 2
+ end: 5
+ value: 'foo'
+
+simple tokens:
+ - name: records line and column
+ input: "\n \r\n \r foo\n"
+ tokens:
+ -
+ kind: NAME
+ start: 8
+ end: 11
+ line: 4
+ column: 3
+ value: 'foo'
+
+ - name: skips whitespace
+ input: "\n\n foo\n\n\n"
+ tokens:
+ -
+ kind: NAME
+ start: 6
+ end: 9
+ value: 'foo'
+
+ - name: skips comments
+ input: "\n #comment\n foo#comment\n"
+ tokens:
+ -
+ kind: NAME
+ start: 18
+ end: 21
+ value: 'foo'
+
+ - name: skips commas
+ input: ",,,foo,,,"
+ tokens:
+ -
+ kind: NAME
+ start: 3
+ end: 6
+ value: 'foo'
+
+ - name: errors respect whitespace
+ input: "\n\n ?\n\n\n"
+ error:
+ message: 'Cannot parse the unexpected character "?".'
+ locations: [{line: 3, column: 5}]
+ string: |
+ Syntax Error: Cannot parse the unexpected character "?".
+ GraphQL request (3:5)
+ 2:
+ 3: ?
+ ^
+ 4:
+
+ - name: lex reports useful information for dashes in names
+ input: "a-b"
+ error:
+ message: 'Invalid number, expected digit but got: "b".'
+ locations: [{ line: 1, column: 3 }]
+ tokens:
+ -
+ kind: Name
+ start: 0
+ end: 1
+ value: a
+
+lexes strings:
+ - name: basic
+ input: '"simple"'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 8
+ value: 'simple'
+
+ - name: whitespace
+ input: '" white space "'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 15
+ value: ' white space '
+
+ - name: quote
+ input: '"quote \""'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 10
+ value: 'quote "'
+
+ - name: escaped
+ input: '"escaped \n\r\b\t\f"'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 20
+ value: "escaped \n\r\b\t\f"
+
+ - name: slashes
+ input: '"slashes \\ \/"'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 15
+ value: 'slashes \ /'
+
+ - name: unicode
+ input: '"unicode \u1234\u5678\u90AB\uCDEF"'
+ tokens:
+ -
+ kind: STRING
+ start: 0
+ end: 34
+ value: "unicode \u1234\u5678\u90AB\uCDEF"
+
+lex reports useful string errors:
+ - name: unterminated
+ input: '"'
+ error:
+ message: "Unterminated string."
+ locations: [{ line: 1, column: 2 }]
+
+ - name: no end quote
+ input: '"no end quote'
+ error:
+ message: 'Unterminated string.'
+ locations: [{ line: 1, column: 14 }]
+
+ - name: single quotes
+ input: "'single quotes'"
+ error:
+ message: "Unexpected single quote character ('), did you mean to use a double quote (\")?"
+ locations: [{ line: 1, column: 1 }]
+
+ - name: control characters
+ input: "\"contains unescaped \u0007 control char\""
+ error:
+ message: 'Invalid character within String: "\u0007".'
+ locations: [{ line: 1, column: 21 }]
+
+ - name: null byte
+ input: "\"null-byte is not \u0000 end of file\""
+ error:
+ message: 'Invalid character within String: "\u0000".'
+ locations: [{ line: 1, column: 19 }]
+
+ - name: unterminated newline
+ input: "\"multi\nline\""
+ error:
+ message: 'Unterminated string.'
+ locations: [{line: 1, column: 7 }]
+
+ - name: unterminated carriage return
+ input: "\"multi\rline\""
+ error:
+ message: 'Unterminated string.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: bad escape character
+ input: '"bad \z esc"'
+ error:
+ message: 'Invalid character escape sequence: \z.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: hex escape sequence
+ input: '"bad \x esc"'
+ error:
+ message: 'Invalid character escape sequence: \x.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: short escape sequence
+ input: '"bad \u1 esc"'
+ error:
+ message: 'Invalid character escape sequence: \u1 es.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: invalid escape sequence 1
+ input: '"bad \u0XX1 esc"'
+ error:
+ message: 'Invalid character escape sequence: \u0XX1.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: invalid escape sequence 2
+ input: '"bad \uXXXX esc"'
+ error:
+ message: 'Invalid character escape sequence: \uXXXX.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: invalid escape sequence 3
+ input: '"bad \uFXXX esc"'
+ error:
+ message: 'Invalid character escape sequence: \uFXXX.'
+ locations: [{ line: 1, column: 7 }]
+
+ - name: invalid character escape sequence
+ input: '"bad \uXXXF esc"'
+ error:
+ message: 'Invalid character escape sequence: \uXXXF.'
+ locations: [{ line: 1, column: 7 }]
+
+lexes block strings:
+ - name: simple
+ input: '"""simple"""'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 12
+ value: 'simple'
+
+ - name: white space
+ input: '""" white space """'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 19
+ value: ' white space '
+
+ - name: contains quote
+ input: '"""contains " quote"""'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 22
+ value: 'contains " quote'
+
+ - name: contains triplequote
+ input: "\"\"\"contains \\\"\"\" triplequote\"\"\""
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 31
+ value: 'contains """ triplequote'
+
+ - name: multi line
+ input: "\"\"\"multi\nline\"\"\""
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 16
+ value: "multi\nline"
+
+ - name: multi line normalized
+ input: "\"\"\"multi\rline\r\nnormalized\"\"\""
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 28
+ value: "multi\nline\nnormalized"
+
+ - name: unescaped
+ input: '"""unescaped \n\r\b\t\f\u1234"""'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 32
+ value: 'unescaped \n\r\b\t\f\u1234'
+
+ - name: slashes
+ input: '"""slashes \\ \/"""'
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 19
+ value: 'slashes \\ \/'
+
+ - name: multiple lines
+ input: |
+ """
+
+ spans
+ multiple
+ lines
+
+ """
+ tokens:
+ -
+ kind: BLOCK_STRING
+ start: 0
+ end: 36
+ value: "spans\n multiple\n lines"
+
+lex reports useful block string errors:
+ - name: unterminated string
+ input: '"""'
+ error:
+ message: "Unterminated string."
+ locations: [{ line: 1, column: 4 }]
+
+ - name: unescaped control characters
+ input: "\"\"\"contains unescaped \u0007 control char\"\"\""
+ error:
+ message: 'Invalid character within String: "\u0007".'
+ locations: [{ line: 1, column: 23 }]
+
+ - name: null byte
+ input: "\"\"\"null-byte is not \u0000 end of file\"\"\""
+ error:
+ message: 'Invalid character within String: "\u0000".'
+ locations: [{ line: 1, column: 21 }]
+
+lexes numbers:
+ - name: integer
+ input: "4"
+ tokens:
+ -
+ kind: INT
+ start: 0
+ end: 1
+ value: '4'
+
+ - name: float
+ input: "4.123"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 5
+ value: '4.123'
+
+ - name: negative
+ input: "-4"
+ tokens:
+ -
+ kind: INT
+ start: 0
+ end: 2
+ value: '-4'
+
+ - name: nine
+ input: "9"
+ tokens:
+ -
+ kind: INT
+ start: 0
+ end: 1
+ value: '9'
+
+ - name: zero
+ input: "0"
+ tokens:
+ -
+ kind: INT
+ start: 0
+ end: 1
+ value: '0'
+
+ - name: negative float
+ input: "-4.123"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 6
+ value: '-4.123'
+
+ - name: float leading zero
+ input: "0.123"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 5
+ value: '0.123'
+
+ - name: exponent whole
+ input: "123e4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 5
+ value: '123e4'
+
+ - name: exponent uppercase
+ input: "123E4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 5
+ value: '123E4'
+
+ - name: exponent negative power
+ input: "123e-4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 6
+ value: '123e-4'
+
+ - name: exponent positive power
+ input: "123e+4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 6
+ value: '123e+4'
+
+ - name: exponent negative base
+ input: "-1.123e4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 8
+ value: '-1.123e4'
+
+ - name: exponent negative base upper
+ input: "-1.123E4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 8
+ value: '-1.123E4'
+
+ - name: exponent negative base negative power
+ input: "-1.123e-4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 9
+ value: '-1.123e-4'
+
+ - name: exponent negative base positive power
+ input: "-1.123e+4"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 9
+ value: '-1.123e+4'
+
+ - name: exponent negative base large power
+ input: "-1.123e4567"
+ tokens:
+ -
+ kind: FLOAT
+ start: 0
+ end: 11
+ value: '-1.123e4567'
+
+lex reports useful number errors:
+ - name: zero
+ input: "00"
+ error:
+ message: 'Invalid number, unexpected digit after 0: "0".'
+ locations: [{ line: 1, column: 2 }]
+
+ - name: positive
+ input: "+1"
+ error:
+ message: 'Cannot parse the unexpected character "+".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: trailing dot
+ input: "1."
+ error:
+ message: 'Invalid number, expected digit but got: <EOF>.'
+ locations: [{ line: 1, column: 3 }]
+
+ - name: trailing dot exponent
+ input: "1.e1"
+ error:
+ message: 'Invalid number, expected digit but got: "e".'
+ locations: [{ line: 1, column: 3 }]
+
+ - name: missing leading zero
+ input: ".123"
+ error:
+ message: 'Cannot parse the unexpected character ".".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: characters
+ input: "1.A"
+ error:
+ message: 'Invalid number, expected digit but got: "A".'
+ locations: [{ line: 1, column: 3 }]
+
+ - name: negative characters
+ input: "-A"
+ error:
+ message: 'Invalid number, expected digit but got: "A".'
+ locations: [{ line: 1, column: 2 }]
+
+ - name: missing exponent
+ input: '1.0e'
+ error:
+ message: 'Invalid number, expected digit but got: <EOF>.'
+ locations: [{ line: 1, column: 5 }]
+
+ - name: character exponent
+ input: "1.0eA"
+ error:
+ message: 'Invalid number, expected digit but got: "A".'
+ locations: [{ line: 1, column: 5 }]
+
+lexes punctuation:
+ - name: bang
+ input: "!"
+ tokens:
+ -
+ kind: BANG
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: dollar
+ input: "$"
+ tokens:
+ -
+ kind: DOLLAR
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: open paren
+ input: "("
+ tokens:
+ -
+ kind: PAREN_L
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: close paren
+ input: ")"
+ tokens:
+ -
+ kind: PAREN_R
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: spread
+ input: "..."
+ tokens:
+ -
+ kind: SPREAD
+ start: 0
+ end: 3
+ value: undefined
+
+ - name: colon
+ input: ":"
+ tokens:
+ -
+ kind: COLON
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: equals
+ input: "="
+ tokens:
+ -
+ kind: EQUALS
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: at
+ input: "@"
+ tokens:
+ -
+ kind: AT
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: open bracket
+ input: "["
+ tokens:
+ -
+ kind: BRACKET_L
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: close bracket
+ input: "]"
+ tokens:
+ -
+ kind: BRACKET_R
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: open brace
+ input: "{"
+ tokens:
+ -
+ kind: BRACE_L
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: close brace
+ input: "}"
+ tokens:
+ -
+ kind: BRACE_R
+ start: 0
+ end: 1
+ value: undefined
+
+ - name: pipe
+ input: "|"
+ tokens:
+ -
+ kind: PIPE
+ start: 0
+ end: 1
+ value: undefined
+
+lex reports useful unknown character error:
+ - name: not a spread
+ input: ".."
+ error:
+ message: 'Cannot parse the unexpected character ".".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: question mark
+ input: "?"
+ error:
+ message: 'Cannot parse the unexpected character "?".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: unicode 203
+ input: "\u203B"
+ error:
+ message: 'Cannot parse the unexpected character "â".'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: unicode 200
+ input: "\u200b"
+ error:
+ message: 'Cannot parse the unexpected character "â".'
+ locations: [{ line: 1, column: 1 }]
+
diff --git a/vendor/github.com/vektah/gqlparser/lexer/token.go b/vendor/github.com/vektah/gqlparser/lexer/token.go
new file mode 100644
index 00000000..aef8b729
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/lexer/token.go
@@ -0,0 +1,148 @@
+package lexer
+
+import (
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+const (
+ Invalid Type = iota
+ EOF
+ Bang
+ Dollar
+ Amp
+ ParenL
+ ParenR
+ Spread
+ Colon
+ Equals
+ At
+ BracketL
+ BracketR
+ BraceL
+ BraceR
+ Pipe
+ Name
+ Int
+ Float
+ String
+ BlockString
+ Comment
+)
+
+func (t Type) Name() string {
+ switch t {
+ case Invalid:
+ return "Invalid"
+ case EOF:
+ return "EOF"
+ case Bang:
+ return "Bang"
+ case Dollar:
+ return "Dollar"
+ case Amp:
+ return "Amp"
+ case ParenL:
+ return "ParenL"
+ case ParenR:
+ return "ParenR"
+ case Spread:
+ return "Spread"
+ case Colon:
+ return "Colon"
+ case Equals:
+ return "Equals"
+ case At:
+ return "At"
+ case BracketL:
+ return "BracketL"
+ case BracketR:
+ return "BracketR"
+ case BraceL:
+ return "BraceL"
+ case BraceR:
+ return "BraceR"
+ case Pipe:
+ return "Pipe"
+ case Name:
+ return "Name"
+ case Int:
+ return "Int"
+ case Float:
+ return "Float"
+ case String:
+ return "String"
+ case BlockString:
+ return "BlockString"
+ case Comment:
+ return "Comment"
+ }
+ return "Unknown " + strconv.Itoa(int(t))
+}
+
+func (t Type) String() string {
+ switch t {
+ case Invalid:
+ return "<Invalid>"
+ case EOF:
+ return "<EOF>"
+ case Bang:
+ return "!"
+ case Dollar:
+ return "$"
+ case Amp:
+ return "&"
+ case ParenL:
+ return "("
+ case ParenR:
+ return ")"
+ case Spread:
+ return "..."
+ case Colon:
+ return ":"
+ case Equals:
+ return "="
+ case At:
+ return "@"
+ case BracketL:
+ return "["
+ case BracketR:
+ return "]"
+ case BraceL:
+ return "{"
+ case BraceR:
+ return "}"
+ case Pipe:
+ return "|"
+ case Name:
+ return "Name"
+ case Int:
+ return "Int"
+ case Float:
+ return "Float"
+ case String:
+ return "String"
+ case BlockString:
+ return "BlockString"
+ case Comment:
+ return "Comment"
+ }
+ return "Unknown " + strconv.Itoa(int(t))
+}
+
+// Type represents a kind of token. The kinds are predefined as constants.
+type Type int
+
+type Token struct {
+ Kind Type // The token type.
+ Value string // The literal value consumed.
+ Pos ast.Position // The file and line this token was read from
+}
+
+func (t Token) String() string {
+ if t.Value != "" {
+ return t.Kind.String() + " " + strconv.Quote(t.Value)
+ }
+ return t.Kind.String()
+}
diff --git a/vendor/github.com/vektah/gqlparser/parser/parser.go b/vendor/github.com/vektah/gqlparser/parser/parser.go
new file mode 100644
index 00000000..f3648cb3
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/parser.go
@@ -0,0 +1,112 @@
+package parser
+
+import (
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/lexer"
+)
+
+type parser struct {
+ lexer lexer.Lexer
+ err *gqlerror.Error
+
+ peeked bool
+ peekToken lexer.Token
+ peekError *gqlerror.Error
+
+ prev lexer.Token
+}
+
+func (p *parser) peekPos() *ast.Position {
+ if p.err != nil {
+ return nil
+ }
+
+ peek := p.peek()
+ return &peek.Pos
+}
+
+func (p *parser) peek() lexer.Token {
+ if p.err != nil {
+ return p.prev
+ }
+
+ if !p.peeked {
+ p.peekToken, p.peekError = p.lexer.ReadToken()
+ p.peeked = true
+ }
+
+ return p.peekToken
+}
+
+func (p *parser) error(tok lexer.Token, format string, args ...interface{}) {
+ if p.err != nil {
+ return
+ }
+ p.err = gqlerror.ErrorLocf(tok.Pos.Src.Name, tok.Pos.Line, tok.Pos.Column, format, args...)
+}
+
+func (p *parser) next() lexer.Token {
+ if p.err != nil {
+ return p.prev
+ }
+ if p.peeked {
+ p.peeked = false
+ p.prev, p.err = p.peekToken, p.peekError
+ } else {
+ p.prev, p.err = p.lexer.ReadToken()
+ }
+ return p.prev
+}
+
+func (p *parser) expectKeyword(value string) lexer.Token {
+ tok := p.peek()
+ if tok.Kind == lexer.Name && tok.Value == value {
+ return p.next()
+ }
+
+ p.error(tok, "Expected %s, found %s", strconv.Quote(value), tok.String())
+ return tok
+}
+
+func (p *parser) expect(kind lexer.Type) lexer.Token {
+ tok := p.peek()
+ if tok.Kind == kind {
+ return p.next()
+ }
+
+ p.error(tok, "Expected %s, found %s", kind, tok.Kind.String())
+ return tok
+}
+
+func (p *parser) skip(kind lexer.Type) bool {
+ tok := p.peek()
+
+ if tok.Kind != kind {
+ return false
+ }
+ p.next()
+ return true
+}
+
+func (p *parser) unexpectedError() {
+ p.unexpectedToken(p.peek())
+}
+
+func (p *parser) unexpectedToken(tok lexer.Token) {
+ p.error(tok, "Unexpected %s", tok.String())
+}
+
+func (p *parser) many(start lexer.Type, end lexer.Type, cb func()) {
+ hasDef := p.skip(start)
+ if !hasDef {
+ return
+ }
+
+ for p.peek().Kind != end && p.err == nil {
+ cb()
+ }
+ p.next()
+}
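
The unexported helpers above (peek, next, expect, skip, many) are the entire vocabulary used by query.go and schema.go below. As a hypothetical illustration of how they compose inside this package, a parenthesised name list could be parsed like this (parseNameList is not part of the real code; parseName is defined in query.go below):

// parseNameList shows the usual shape of a production built on the helpers.
func (p *parser) parseNameList() []string {
    var names []string
    // many consumes the opening ParenL if present, invokes the callback once
    // per element until it peeks ParenR or hits an error, then consumes ParenR.
    p.many(lexer.ParenL, lexer.ParenR, func() {
        names = append(names, p.parseName())
    })
    return names
}
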
diff --git a/vendor/github.com/vektah/gqlparser/parser/query.go b/vendor/github.com/vektah/gqlparser/parser/query.go
new file mode 100644
index 00000000..7fecb57f
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/query.go
@@ -0,0 +1,334 @@
+package parser
+
+import (
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/lexer"
+
+ . "github.com/vektah/gqlparser/ast"
+)
+
+func ParseQuery(source *Source) (*QueryDocument, *gqlerror.Error) {
+ p := parser{
+ lexer: lexer.New(source),
+ }
+ return p.parseQueryDocument(), p.err
+}
+
+func (p *parser) parseQueryDocument() *QueryDocument {
+ var doc QueryDocument
+ for p.peek().Kind != lexer.EOF {
+ if p.err != nil {
+ return &doc
+ }
+ doc.Position = p.peekPos()
+ switch p.peek().Kind {
+ case lexer.Name:
+ switch p.peek().Value {
+ case "query", "mutation", "subscription":
+ doc.Operations = append(doc.Operations, p.parseOperationDefinition())
+ case "fragment":
+ doc.Fragments = append(doc.Fragments, p.parseFragmentDefinition())
+ default:
+ p.unexpectedError()
+ }
+ case lexer.BraceL:
+ doc.Operations = append(doc.Operations, p.parseOperationDefinition())
+ default:
+ p.unexpectedError()
+ }
+ }
+
+ return &doc
+}
+
+func (p *parser) parseOperationDefinition() *OperationDefinition {
+ if p.peek().Kind == lexer.BraceL {
+ return &OperationDefinition{
+ Position: p.peekPos(),
+ Operation: Query,
+ SelectionSet: p.parseSelectionSet(),
+ }
+ }
+
+ var od OperationDefinition
+ od.Position = p.peekPos()
+ od.Operation = p.parseOperationType()
+
+ if p.peek().Kind == lexer.Name {
+ od.Name = p.next().Value
+ }
+
+ od.VariableDefinitions = p.parseVariableDefinitions()
+ od.Directives = p.parseDirectives(false)
+ od.SelectionSet = p.parseSelectionSet()
+
+ return &od
+}
+
+func (p *parser) parseOperationType() Operation {
+ tok := p.next()
+ switch tok.Value {
+ case "query":
+ return Query
+ case "mutation":
+ return Mutation
+ case "subscription":
+ return Subscription
+ }
+ p.unexpectedToken(tok)
+ return ""
+}
+
+func (p *parser) parseVariableDefinitions() VariableDefinitionList {
+ var defs []*VariableDefinition
+ p.many(lexer.ParenL, lexer.ParenR, func() {
+ defs = append(defs, p.parseVariableDefinition())
+ })
+
+ return defs
+}
+
+func (p *parser) parseVariableDefinition() *VariableDefinition {
+ var def VariableDefinition
+ def.Position = p.peekPos()
+ def.Variable = p.parseVariable()
+
+ p.expect(lexer.Colon)
+
+ def.Type = p.parseTypeReference()
+
+ if p.skip(lexer.Equals) {
+ def.DefaultValue = p.parseValueLiteral(true)
+ }
+
+ return &def
+}
+
+func (p *parser) parseVariable() string {
+ p.expect(lexer.Dollar)
+ return p.parseName()
+}
+
+func (p *parser) parseSelectionSet() SelectionSet {
+ var selections []Selection
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ selections = append(selections, p.parseSelection())
+ })
+
+ return SelectionSet(selections)
+}
+
+func (p *parser) parseSelection() Selection {
+ if p.peek().Kind == lexer.Spread {
+ return p.parseFragment()
+ }
+ return p.parseField()
+}
+
+func (p *parser) parseField() *Field {
+ var field Field
+ field.Position = p.peekPos()
+ field.Alias = p.parseName()
+
+ if p.skip(lexer.Colon) {
+ field.Name = p.parseName()
+ } else {
+ field.Name = field.Alias
+ }
+
+ field.Arguments = p.parseArguments(false)
+ field.Directives = p.parseDirectives(false)
+ if p.peek().Kind == lexer.BraceL {
+ field.SelectionSet = p.parseSelectionSet()
+ }
+
+ return &field
+}
+
+func (p *parser) parseArguments(isConst bool) ArgumentList {
+ var arguments ArgumentList
+ p.many(lexer.ParenL, lexer.ParenR, func() {
+ arguments = append(arguments, p.parseArgument(isConst))
+ })
+
+ return arguments
+}
+
+func (p *parser) parseArgument(isConst bool) *Argument {
+ arg := Argument{}
+ arg.Position = p.peekPos()
+ arg.Name = p.parseName()
+ p.expect(lexer.Colon)
+
+ arg.Value = p.parseValueLiteral(isConst)
+ return &arg
+}
+
+func (p *parser) parseFragment() Selection {
+ p.expect(lexer.Spread)
+
+ if peek := p.peek(); peek.Kind == lexer.Name && peek.Value != "on" {
+ return &FragmentSpread{
+ Position: p.peekPos(),
+ Name: p.parseFragmentName(),
+ Directives: p.parseDirectives(false),
+ }
+ }
+
+ var def InlineFragment
+ def.Position = p.peekPos()
+ if p.peek().Value == "on" {
+ p.next() // "on"
+
+ def.TypeCondition = p.parseName()
+ }
+
+ def.Directives = p.parseDirectives(false)
+ def.SelectionSet = p.parseSelectionSet()
+ return &def
+}
+
+func (p *parser) parseFragmentDefinition() *FragmentDefinition {
+ var def FragmentDefinition
+ def.Position = p.peekPos()
+ p.expectKeyword("fragment")
+
+ def.Name = p.parseFragmentName()
+ def.VariableDefinition = p.parseVariableDefinitions()
+
+ p.expectKeyword("on")
+
+ def.TypeCondition = p.parseName()
+ def.Directives = p.parseDirectives(false)
+ def.SelectionSet = p.parseSelectionSet()
+ return &def
+}
+
+func (p *parser) parseFragmentName() string {
+ if p.peek().Value == "on" {
+ p.unexpectedError()
+ return ""
+ }
+
+ return p.parseName()
+}
+
+func (p *parser) parseValueLiteral(isConst bool) *Value {
+ token := p.peek()
+
+ var kind ValueKind
+ switch token.Kind {
+ case lexer.BracketL:
+ return p.parseList(isConst)
+ case lexer.BraceL:
+ return p.parseObject(isConst)
+ case lexer.Dollar:
+ if isConst {
+ p.unexpectedError()
+ return nil
+ }
+ return &Value{Position: &token.Pos, Raw: p.parseVariable(), Kind: Variable}
+ case lexer.Int:
+ kind = IntValue
+ case lexer.Float:
+ kind = FloatValue
+ case lexer.String:
+ kind = StringValue
+ case lexer.BlockString:
+ kind = BlockValue
+ case lexer.Name:
+ switch token.Value {
+ case "true", "false":
+ kind = BooleanValue
+ case "null":
+ kind = NullValue
+ default:
+ kind = EnumValue
+ }
+ default:
+ p.unexpectedError()
+ return nil
+ }
+
+ p.next()
+
+ return &Value{Position: &token.Pos, Raw: token.Value, Kind: kind}
+}
+
+func (p *parser) parseList(isConst bool) *Value {
+ var values ChildValueList
+ pos := p.peekPos()
+ p.many(lexer.BracketL, lexer.BracketR, func() {
+ values = append(values, &ChildValue{Value: p.parseValueLiteral(isConst)})
+ })
+
+ return &Value{Children: values, Kind: ListValue, Position: pos}
+}
+
+func (p *parser) parseObject(isConst bool) *Value {
+ var fields ChildValueList
+ pos := p.peekPos()
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ fields = append(fields, p.parseObjectField(isConst))
+ })
+
+ return &Value{Children: fields, Kind: ObjectValue, Position: pos}
+}
+
+func (p *parser) parseObjectField(isConst bool) *ChildValue {
+ field := ChildValue{}
+ field.Position = p.peekPos()
+ field.Name = p.parseName()
+
+ p.expect(lexer.Colon)
+
+ field.Value = p.parseValueLiteral(isConst)
+ return &field
+}
+
+func (p *parser) parseDirectives(isConst bool) []*Directive {
+ var directives []*Directive
+
+ for p.peek().Kind == lexer.At {
+ if p.err != nil {
+ break
+ }
+ directives = append(directives, p.parseDirective(isConst))
+ }
+ return directives
+}
+
+func (p *parser) parseDirective(isConst bool) *Directive {
+ p.expect(lexer.At)
+
+ return &Directive{
+ Position: p.peekPos(),
+ Name: p.parseName(),
+ Arguments: p.parseArguments(isConst),
+ }
+}
+
+func (p *parser) parseTypeReference() *Type {
+ var typ Type
+
+ if p.skip(lexer.BracketL) {
+ typ.Position = p.peekPos()
+ typ.Elem = p.parseTypeReference()
+ p.expect(lexer.BracketR)
+ } else {
+ typ.Position = p.peekPos()
+ typ.NamedType = p.parseName()
+ }
+
+ if p.skip(lexer.Bang) {
+ typ.Position = p.peekPos()
+ typ.NonNull = true
+ }
+ return &typ
+}
+
+func (p *parser) parseName() string {
+ token := p.expect(lexer.Name)
+
+ return token.Value
+}
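
ParseQuery above is the package's public entry point for executable documents. A short usage sketch, again assuming ast.Source carries Name and Input fields:

package main

import (
    "fmt"

    "github.com/vektah/gqlparser/ast"
    "github.com/vektah/gqlparser/parser"
)

func main() {
    doc, err := parser.ParseQuery(&ast.Source{
        Name:  "example.graphql",
        Input: "query Hero { hero { name friends { name } } }",
    })
    if err != nil {
        fmt.Println("parse error:", err)
        return
    }

    for _, op := range doc.Operations {
        fmt.Printf("operation %q (%s)\n", op.Name, op.Operation)
        for _, sel := range op.SelectionSet {
            // Only fields are handled here; fragment spreads and inline
            // fragments would need their own type cases.
            if f, ok := sel.(*ast.Field); ok {
                fmt.Println("  selects field:", f.Name)
            }
        }
    }
}
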
diff --git a/vendor/github.com/vektah/gqlparser/parser/query_test.yml b/vendor/github.com/vektah/gqlparser/parser/query_test.yml
new file mode 100644
index 00000000..f392eb8e
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/query_test.yml
@@ -0,0 +1,507 @@
+parser provides useful errors:
+ - name: unclosed paren
+ input: '{'
+ error:
+ message: "Expected Name, found <EOF>"
+ locations: [{line: 1, column: 2}]
+
+ - name: missing on in fragment
+ input: |
+ { ...MissingOn }
+ fragment MissingOn Type
+ error:
+ message: 'Expected "on", found Name "Type"'
+ locations: [{ line: 2, column: 20 }]
+
+ - name: missing name after alias
+ input: '{ field: {} }'
+ error:
+ message: "Expected Name, found {"
+ locations: [{ line: 1, column: 10 }]
+
+ - name: not an operation
+ input: 'notanoperation Foo { field }'
+ error:
+ message: 'Unexpected Name "notanoperation"'
+ locations: [{ line: 1, column: 1 }]
+
+ - name: a wild splat appears
+ input: '...'
+ error:
+ message: 'Unexpected ...'
+ locations: [{ line: 1, column: 1}]
+
+variables:
+ - name: are allowed in args
+ input: '{ field(complex: { a: { b: [ $var ] } }) }'
+
+ - name: are not allowed in default args
+ input: 'query Foo($x: Complex = { a: { b: [ $var ] } }) { field }'
+ error:
+ message: 'Unexpected $'
+ locations: [{ line: 1, column: 37 }]
+
+fragments:
+ - name: can not be named 'on'
+ input: 'fragment on on on { on }'
+ error:
+ message: 'Unexpected Name "on"'
+ locations: [{ line: 1, column: 10 }]
+
+ - name: can not spread fragments called 'on'
+ input: '{ ...on }'
+ error:
+ message: 'Expected Name, found }'
+ locations: [{ line: 1, column: 9 }]
+
+encoding:
+ - name: multibyte characters are supported
+ input: |
+ # This comment has a ਊ multi-byte character.
+ { field(arg: "Has a ਊ multi-byte character.") }
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "field"
+ Name: "field"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "arg"
+ Value: "Has a ਊ multi-byte character."
+
+keywords are allowed anywhere a name is:
+ - name: on
+ input: |
+ query on {
+ ... a
+ ... on on { field }
+ }
+ fragment a on Type {
+ on(on: $on)
+ @on(on: on)
+ }
+
+ - name: subscription
+ input: |
+ query subscription {
+ ... subscription
+ ... on subscription { field }
+ }
+ fragment subscription on Type {
+ subscription(subscription: $subscription)
+ @subscription(subscription: subscription)
+ }
+
+ - name: true
+ input: |
+ query true {
+ ... true
+ ... on true { field }
+ }
+ fragment true on Type {
+ true(true: $true)
+ @true(true: true)
+ }
+
+operations:
+ - name: anonymous mutation
+ input: 'mutation { mutationField }'
+
+ - name: named mutation
+ input: 'mutation Foo { mutationField }'
+
+ - name: anonymous subscription
+ input: 'subscription { subscriptionField }'
+
+ - name: named subscription
+ input: 'subscription Foo { subscriptionField }'
+
+
+ast:
+ - name: simple query
+ input: |
+ {
+ node(id: 4) {
+ id,
+ name
+ }
+ }
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "node"
+ Name: "node"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "id"
+ Value: 4
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <Field>
+ Alias: "name"
+ Name: "name"
+
+ - name: nameless query with no variables
+ input: |
+ query {
+ node {
+ id
+ }
+ }
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "node"
+ Name: "node"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+
+ - name: fragment defined variables
+ input: 'fragment a($v: Boolean = false) on t { f(v: $v) }'
+ ast: |
+ <QueryDocument>
+ Fragments: [FragmentDefinition]
+ - <FragmentDefinition>
+ Name: "a"
+ VariableDefinition: [VariableDefinition]
+ - <VariableDefinition>
+ Variable: "v"
+ Type: Boolean
+ DefaultValue: false
+ TypeCondition: "t"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "v"
+ Value: $v
+
+
+values:
+ - name: null
+ input: '{ f(id: null) }'
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "id"
+ Value: null
+
+ - name: strings
+ input: '{ f(long: """long""", short: "short") } '
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "long"
+ Value: "long"
+ - <Argument>
+ Name: "short"
+ Value: "short"
+
+ - name: list
+ input: '{ f(id: [1,2]) }'
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "id"
+ Value: [1,2]
+
+types:
+ - name: common types
+ input: 'query ($string: String, $int: Int, $arr: [Arr], $notnull: [Arr!]!) { f }'
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ VariableDefinitions: [VariableDefinition]
+ - <VariableDefinition>
+ Variable: "string"
+ Type: String
+ - <VariableDefinition>
+ Variable: "int"
+ Type: Int
+ - <VariableDefinition>
+ Variable: "arr"
+ Type: [Arr]
+ - <VariableDefinition>
+ Variable: "notnull"
+ Type: [Arr!]!
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "f"
+ Name: "f"
+
+large queries:
+ - name: kitchen sink
+ input: |
+ # Copyright (c) 2015-present, Facebook, Inc.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ query queryName($foo: ComplexType, $site: Site = MOBILE) {
+ whoever123is: node(id: [123, 456]) {
+ id ,
+ ... on User @defer {
+ field2 {
+ id ,
+ alias: field1(first:10, after:$foo,) @include(if: $foo) {
+ id,
+ ...frag
+ }
+ }
+ }
+ ... @skip(unless: $foo) {
+ id
+ }
+ ... {
+ id
+ }
+ }
+ }
+
+ mutation likeStory {
+ like(story: 123) @defer {
+ story {
+ id
+ }
+ }
+ }
+
+ subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) {
+ storyLikeSubscribe(input: $input) {
+ story {
+ likers {
+ count
+ }
+ likeSentence {
+ text
+ }
+ }
+ }
+ }
+
+ fragment frag on Friend {
+ foo(size: $size, bar: $b, obj: {key: "value", block: """
+ block string uses \"""
+ """})
+ }
+
+ {
+ unnamed(truthy: true, falsey: false, nullish: null),
+ query
+ }
+ ast: |
+ <QueryDocument>
+ Operations: [OperationDefinition]
+ - <OperationDefinition>
+ Operation: Operation("query")
+ Name: "queryName"
+ VariableDefinitions: [VariableDefinition]
+ - <VariableDefinition>
+ Variable: "foo"
+ Type: ComplexType
+ - <VariableDefinition>
+ Variable: "site"
+ Type: Site
+ DefaultValue: MOBILE
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "whoever123is"
+ Name: "node"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "id"
+ Value: [123,456]
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <InlineFragment>
+ TypeCondition: "User"
+ Directives: [Directive]
+ - <Directive>
+ Name: "defer"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "field2"
+ Name: "field2"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <Field>
+ Alias: "alias"
+ Name: "field1"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "first"
+ Value: 10
+ - <Argument>
+ Name: "after"
+ Value: $foo
+ Directives: [Directive]
+ - <Directive>
+ Name: "include"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "if"
+ Value: $foo
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <FragmentSpread>
+ Name: "frag"
+ - <InlineFragment>
+ Directives: [Directive]
+ - <Directive>
+ Name: "skip"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "unless"
+ Value: $foo
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <InlineFragment>
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <OperationDefinition>
+ Operation: Operation("mutation")
+ Name: "likeStory"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "like"
+ Name: "like"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "story"
+ Value: 123
+ Directives: [Directive]
+ - <Directive>
+ Name: "defer"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "story"
+ Name: "story"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "id"
+ Name: "id"
+ - <OperationDefinition>
+ Operation: Operation("subscription")
+ Name: "StoryLikeSubscription"
+ VariableDefinitions: [VariableDefinition]
+ - <VariableDefinition>
+ Variable: "input"
+ Type: StoryLikeSubscribeInput
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "storyLikeSubscribe"
+ Name: "storyLikeSubscribe"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "input"
+ Value: $input
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "story"
+ Name: "story"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "likers"
+ Name: "likers"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "count"
+ Name: "count"
+ - <Field>
+ Alias: "likeSentence"
+ Name: "likeSentence"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "text"
+ Name: "text"
+ - <OperationDefinition>
+ Operation: Operation("query")
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "unnamed"
+ Name: "unnamed"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "truthy"
+ Value: true
+ - <Argument>
+ Name: "falsey"
+ Value: false
+ - <Argument>
+ Name: "nullish"
+ Value: null
+ - <Field>
+ Alias: "query"
+ Name: "query"
+ Fragments: [FragmentDefinition]
+ - <FragmentDefinition>
+ Name: "frag"
+ TypeCondition: "Friend"
+ SelectionSet: [Selection]
+ - <Field>
+ Alias: "foo"
+ Name: "foo"
+ Arguments: [Argument]
+ - <Argument>
+ Name: "size"
+ Value: $size
+ - <Argument>
+ Name: "bar"
+ Value: $b
+ - <Argument>
+ Name: "obj"
+ Value: {"key":"value","block":"block string uses \"\"\""}
diff --git a/vendor/github.com/vektah/gqlparser/parser/schema.go b/vendor/github.com/vektah/gqlparser/parser/schema.go
new file mode 100644
index 00000000..f409f1f4
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/schema.go
@@ -0,0 +1,503 @@
+package parser
+
+import (
+ . "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/lexer"
+)
+
+func ParseSchema(source *Source) (*SchemaDocument, *gqlerror.Error) {
+ p := parser{
+ lexer: lexer.New(source),
+ }
+ return p.parseSchemaDocument(), p.err
+}
+
+func (p *parser) parseSchemaDocument() *SchemaDocument {
+ var doc SchemaDocument
+ doc.Position = p.peekPos()
+ for p.peek().Kind != lexer.EOF {
+ if p.err != nil {
+ return nil
+ }
+
+ var description string
+ if p.peek().Kind == lexer.BlockString || p.peek().Kind == lexer.String {
+ description = p.parseDescription()
+ }
+
+ if p.peek().Kind != lexer.Name {
+ p.unexpectedError()
+ break
+ }
+
+ switch p.peek().Value {
+ case "scalar", "type", "interface", "union", "enum", "input":
+ doc.Definitions = append(doc.Definitions, p.parseTypeSystemDefinition(description))
+ case "schema":
+ doc.Schema = append(doc.Schema, p.parseSchemaDefinition(description))
+ case "directive":
+ doc.Directives = append(doc.Directives, p.parseDirectiveDefinition(description))
+ case "extend":
+ if description != "" {
+ p.unexpectedToken(p.prev)
+ }
+ p.parseTypeSystemExtension(&doc)
+ default:
+ p.unexpectedError()
+ return nil
+ }
+ }
+
+ return &doc
+}
+
+func (p *parser) parseDescription() string {
+ token := p.peek()
+
+ if token.Kind != lexer.BlockString && token.Kind != lexer.String {
+ return ""
+ }
+
+ return p.next().Value
+}
+
+func (p *parser) parseTypeSystemDefinition(description string) *Definition {
+ tok := p.peek()
+ if tok.Kind != lexer.Name {
+ p.unexpectedError()
+ return nil
+ }
+
+ switch tok.Value {
+ case "scalar":
+ return p.parseScalarTypeDefinition(description)
+ case "type":
+ return p.parseObjectTypeDefinition(description)
+ case "interface":
+ return p.parseInterfaceTypeDefinition(description)
+ case "union":
+ return p.parseUnionTypeDefinition(description)
+ case "enum":
+ return p.parseEnumTypeDefinition(description)
+ case "input":
+ return p.parseInputObjectTypeDefinition(description)
+ default:
+ p.unexpectedError()
+ return nil
+ }
+}
+
+func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition {
+ p.expectKeyword("schema")
+
+ var def SchemaDefinition
+ def.Position = p.peekPos()
+ def.Description = description
+ def.Directives = p.parseDirectives(true)
+
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition())
+ })
+ return &def
+}
+
+func (p *parser) parseOperationTypeDefinition() *OperationTypeDefinition {
+ var op OperationTypeDefinition
+ op.Position = p.peekPos()
+ op.Operation = p.parseOperationType()
+ p.expect(lexer.Colon)
+ op.Type = p.parseName()
+ return &op
+}
+
+func (p *parser) parseScalarTypeDefinition(description string) *Definition {
+ p.expectKeyword("scalar")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Scalar
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ return &def
+}
+
+func (p *parser) parseObjectTypeDefinition(description string) *Definition {
+ p.expectKeyword("type")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Object
+ def.Description = description
+ def.Name = p.parseName()
+ def.Interfaces = p.parseImplementsInterfaces()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseFieldsDefinition()
+ return &def
+}
+
+func (p *parser) parseImplementsInterfaces() []string {
+ var types []string
+ if p.peek().Value == "implements" {
+ p.next()
+ // optional leading ampersand
+ p.skip(lexer.Amp)
+
+ types = append(types, p.parseName())
+ for p.skip(lexer.Amp) && p.err == nil {
+ types = append(types, p.parseName())
+ }
+ }
+ return types
+}
+
+func (p *parser) parseFieldsDefinition() FieldList {
+ var defs FieldList
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ defs = append(defs, p.parseFieldDefinition())
+ })
+ return defs
+}
+
+func (p *parser) parseFieldDefinition() *FieldDefinition {
+ var def FieldDefinition
+ def.Position = p.peekPos()
+ def.Description = p.parseDescription()
+ def.Name = p.parseName()
+ def.Arguments = p.parseArgumentDefs()
+ p.expect(lexer.Colon)
+ def.Type = p.parseTypeReference()
+ def.Directives = p.parseDirectives(true)
+
+ return &def
+}
+
+func (p *parser) parseArgumentDefs() ArgumentDefinitionList {
+ var args ArgumentDefinitionList
+ p.many(lexer.ParenL, lexer.ParenR, func() {
+ args = append(args, p.parseArgumentDef())
+ })
+ return args
+}
+
+func (p *parser) parseArgumentDef() *ArgumentDefinition {
+ var def ArgumentDefinition
+ def.Position = p.peekPos()
+ def.Description = p.parseDescription()
+ def.Name = p.parseName()
+ p.expect(lexer.Colon)
+ def.Type = p.parseTypeReference()
+ if p.skip(lexer.Equals) {
+ def.DefaultValue = p.parseValueLiteral(true)
+ }
+ def.Directives = p.parseDirectives(true)
+ return &def
+}
+
+func (p *parser) parseInputValueDef() *FieldDefinition {
+ var def FieldDefinition
+ def.Position = p.peekPos()
+ def.Description = p.parseDescription()
+ def.Name = p.parseName()
+ p.expect(lexer.Colon)
+ def.Type = p.parseTypeReference()
+ if p.skip(lexer.Equals) {
+ def.DefaultValue = p.parseValueLiteral(true)
+ }
+ def.Directives = p.parseDirectives(true)
+ return &def
+}
+
+func (p *parser) parseInterfaceTypeDefinition(description string) *Definition {
+ p.expectKeyword("interface")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Interface
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseFieldsDefinition()
+ return &def
+}
+
+func (p *parser) parseUnionTypeDefinition(description string) *Definition {
+ p.expectKeyword("union")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Union
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Types = p.parseUnionMemberTypes()
+ return &def
+}
+
+func (p *parser) parseUnionMemberTypes() []string {
+ var types []string
+ if p.skip(lexer.Equals) {
+ // optional leading pipe
+ p.skip(lexer.Pipe)
+
+ types = append(types, p.parseName())
+ for p.skip(lexer.Pipe) && p.err == nil {
+ types = append(types, p.parseName())
+ }
+ }
+ return types
+}
+
+func (p *parser) parseEnumTypeDefinition(description string) *Definition {
+ p.expectKeyword("enum")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Enum
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.EnumValues = p.parseEnumValuesDefinition()
+ return &def
+}
+
+func (p *parser) parseEnumValuesDefinition() EnumValueList {
+ var values EnumValueList
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ values = append(values, p.parseEnumValueDefinition())
+ })
+ return values
+}
+
+func (p *parser) parseEnumValueDefinition() *EnumValueDefinition {
+ return &EnumValueDefinition{
+ Position: p.peekPos(),
+ Description: p.parseDescription(),
+ Name: p.parseName(),
+ Directives: p.parseDirectives(true),
+ }
+}
+
+func (p *parser) parseInputObjectTypeDefinition(description string) *Definition {
+ p.expectKeyword("input")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = InputObject
+ def.Description = description
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseInputFieldsDefinition()
+ return &def
+}
+
+func (p *parser) parseInputFieldsDefinition() FieldList {
+ var values FieldList
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ values = append(values, p.parseInputValueDef())
+ })
+ return values
+}
+
+func (p *parser) parseTypeSystemExtension(doc *SchemaDocument) {
+ p.expectKeyword("extend")
+
+ switch p.peek().Value {
+ case "schema":
+ doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension())
+ case "scalar":
+ doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension())
+ case "type":
+ doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension())
+ case "interface":
+ doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension())
+ case "union":
+ doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension())
+ case "enum":
+ doc.Extensions = append(doc.Extensions, p.parseEnumTypeExtension())
+ case "input":
+ doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension())
+ default:
+ p.unexpectedError()
+ }
+}
+
+func (p *parser) parseSchemaExtension() *SchemaDefinition {
+ p.expectKeyword("schema")
+
+ var def SchemaDefinition
+ def.Position = p.peekPos()
+ def.Directives = p.parseDirectives(true)
+ p.many(lexer.BraceL, lexer.BraceR, func() {
+ def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition())
+ })
+ if len(def.Directives) == 0 && len(def.OperationTypes) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseScalarTypeExtension() *Definition {
+ p.expectKeyword("scalar")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Scalar
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ if len(def.Directives) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseObjectTypeExtension() *Definition {
+ p.expectKeyword("type")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Object
+ def.Name = p.parseName()
+ def.Interfaces = p.parseImplementsInterfaces()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseFieldsDefinition()
+ if len(def.Interfaces) == 0 && len(def.Directives) == 0 && len(def.Fields) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseInterfaceTypeExtension() *Definition {
+ p.expectKeyword("interface")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Interface
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Fields = p.parseFieldsDefinition()
+ if len(def.Directives) == 0 && len(def.Fields) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseUnionTypeExtension() *Definition {
+ p.expectKeyword("union")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Union
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.Types = p.parseUnionMemberTypes()
+
+ if len(def.Directives) == 0 && len(def.Types) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseEnumTypeExtension() *Definition {
+ p.expectKeyword("enum")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = Enum
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(true)
+ def.EnumValues = p.parseEnumValuesDefinition()
+ if len(def.Directives) == 0 && len(def.EnumValues) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseInputObjectTypeExtension() *Definition {
+ p.expectKeyword("input")
+
+ var def Definition
+ def.Position = p.peekPos()
+ def.Kind = InputObject
+ def.Name = p.parseName()
+ def.Directives = p.parseDirectives(false)
+ def.Fields = p.parseInputFieldsDefinition()
+ if len(def.Directives) == 0 && len(def.Fields) == 0 {
+ p.unexpectedError()
+ }
+ return &def
+}
+
+func (p *parser) parseDirectiveDefinition(description string) *DirectiveDefinition {
+ p.expectKeyword("directive")
+ p.expect(lexer.At)
+
+ var def DirectiveDefinition
+ def.Position = p.peekPos()
+ def.Description = description
+ def.Name = p.parseName()
+ def.Arguments = p.parseArgumentDefs()
+
+ p.expectKeyword("on")
+ def.Locations = p.parseDirectiveLocations()
+ return &def
+}
+
+func (p *parser) parseDirectiveLocations() []DirectiveLocation {
+ p.skip(lexer.Pipe)
+
+ locations := []DirectiveLocation{p.parseDirectiveLocation()}
+
+ for p.skip(lexer.Pipe) && p.err == nil {
+ locations = append(locations, p.parseDirectiveLocation())
+ }
+
+ return locations
+}
+
+func (p *parser) parseDirectiveLocation() DirectiveLocation {
+ name := p.expect(lexer.Name)
+
+ switch name.Value {
+ case `QUERY`:
+ return LocationQuery
+ case `MUTATION`:
+ return LocationMutation
+ case `SUBSCRIPTION`:
+ return LocationSubscription
+ case `FIELD`:
+ return LocationField
+ case `FRAGMENT_DEFINITION`:
+ return LocationFragmentDefinition
+ case `FRAGMENT_SPREAD`:
+ return LocationFragmentSpread
+ case `INLINE_FRAGMENT`:
+ return LocationInlineFragment
+ case `SCHEMA`:
+ return LocationSchema
+ case `SCALAR`:
+ return LocationScalar
+ case `OBJECT`:
+ return LocationObject
+ case `FIELD_DEFINITION`:
+ return LocationFieldDefinition
+ case `ARGUMENT_DEFINITION`:
+ return LocationArgumentDefinition
+ case `INTERFACE`:
+ return LocationInterface
+ case `UNION`:
+ return LocationUnion
+ case `ENUM`:
+ return LocationEnum
+ case `ENUM_VALUE`:
+ return LocationEnumValue
+ case `INPUT_OBJECT`:
+ return LocationInputObject
+ case `INPUT_FIELD_DEFINITION`:
+ return LocationInputFieldDefinition
+ }
+
+ p.unexpectedToken(name)
+ return ""
+}
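
ParseSchema is the corresponding entry point for SDL documents. A minimal sketch listing the parsed type definitions and their fields (same ast.Source assumption as above; list and non-null wrappers are ignored for brevity):

package main

import (
    "fmt"

    "github.com/vektah/gqlparser/ast"
    "github.com/vektah/gqlparser/parser"
)

func main() {
    doc, err := parser.ParseSchema(&ast.Source{
        Name:  "schema.graphql",
        Input: "type Query { hello(name: String): String }",
    })
    if err != nil {
        fmt.Println("parse error:", err)
        return
    }

    for _, def := range doc.Definitions {
        fmt.Printf("%s %s\n", def.Kind, def.Name)
        for _, field := range def.Fields {
            // NamedType is empty for list types; those are skipped here.
            fmt.Printf("  %s: %s\n", field.Name, field.Type.NamedType)
        }
    }
}
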
diff --git a/vendor/github.com/vektah/gqlparser/parser/schema_test.yml b/vendor/github.com/vektah/gqlparser/parser/schema_test.yml
new file mode 100644
index 00000000..c65239a5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/parser/schema_test.yml
@@ -0,0 +1,505 @@
+object types:
+ - name: simple
+ input: |
+ type Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+ - name: with description
+ input: |
+ "Description"
+ type Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Description: "Description"
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+ - name: with block description
+ input: |
+ """
+ Description
+ """
+ # Even with comments between them
+ type Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Description: "Description"
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+ - name: with field arg
+ input: |
+ type Hello {
+ world(flag: Boolean): String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Arguments: [ArgumentDefinition]
+ - <ArgumentDefinition>
+ Name: "flag"
+ Type: Boolean
+ Type: String
+
+ - name: with field arg and default value
+ input: |
+ type Hello {
+ world(flag: Boolean = true): String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Arguments: [ArgumentDefinition]
+ - <ArgumentDefinition>
+ Name: "flag"
+ DefaultValue: true
+ Type: Boolean
+ Type: String
+
+ - name: with field list arg
+ input: |
+ type Hello {
+ world(things: [String]): String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Arguments: [ArgumentDefinition]
+ - <ArgumentDefinition>
+ Name: "things"
+ Type: [String]
+ Type: String
+
+ - name: with two args
+ input: |
+ type Hello {
+ world(argOne: Boolean, argTwo: Int): String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Arguments: [ArgumentDefinition]
+ - <ArgumentDefinition>
+ Name: "argOne"
+ Type: Boolean
+ - <ArgumentDefinition>
+ Name: "argTwo"
+ Type: Int
+ Type: String
+
+type extensions:
+ - name: Object extension
+ input: |
+ extend type Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Extensions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+ - name: without any fields
+ input: "extend type Hello implements Greeting"
+ ast: |
+ <SchemaDocument>
+ Extensions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "Greeting"
+
+ - name: without fields twice
+ input: |
+ extend type Hello implements Greeting
+ extend type Hello implements SecondGreeting
+ ast: |
+ <SchemaDocument>
+ Extensions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "Greeting"
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "SecondGreeting"
+
+ - name: without anything errors
+ input: "extend type Hello"
+ error:
+ message: "Unexpected <EOF>"
+ locations: [{ line: 1, column: 18 }]
+
+ - name: can have descriptions # hmm, this might not be spec compliant...
+ input: |
+ "Description"
+ extend type Hello {
+ world: String
+ }
+ error:
+ message: 'Unexpected String "Description"'
+ locations: [{ line: 1, column: 2 }]
+
+ - name: can not have descriptions on types
+ input: |
+ extend "Description" type Hello {
+ world: String
+ }
+ error:
+ message: Unexpected String "Description"
+ locations: [{ line: 1, column: 9 }]
+
+schema definition:
+ - name: simple
+ input: |
+ schema {
+ query: Query
+ }
+ ast: |
+ <SchemaDocument>
+ Schema: [SchemaDefinition]
+ - <SchemaDefinition>
+ OperationTypes: [OperationTypeDefinition]
+ - <OperationTypeDefinition>
+ Operation: Operation("query")
+ Type: "Query"
+
+schema extensions:
+ - name: simple
+ input: |
+ extend schema {
+ mutation: Mutation
+ }
+ ast: |
+ <SchemaDocument>
+ SchemaExtension: [SchemaDefinition]
+ - <SchemaDefinition>
+ OperationTypes: [OperationTypeDefinition]
+ - <OperationTypeDefinition>
+ Operation: Operation("mutation")
+ Type: "Mutation"
+
+ - name: directive only
+ input: "extend schema @directive"
+ ast: |
+ <SchemaDocument>
+ SchemaExtension: [SchemaDefinition]
+ - <SchemaDefinition>
+ Directives: [Directive]
+ - <Directive>
+ Name: "directive"
+
+ - name: without anything errors
+ input: "extend schema"
+ error:
+ message: "Unexpected <EOF>"
+ locations: [{ line: 1, column: 14}]
+
+type extensions:
+ - name: all can have directives
+ input: |
+ extend scalar Foo @deprecated
+ extend type Foo @deprecated
+ extend interface Foo @deprecated
+ extend union Foo @deprecated
+ extend enum Foo @deprecated
+ extend input Foo @deprecated
+ ast: |
+ <SchemaDocument>
+ Extensions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("SCALAR")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("INTERFACE")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("UNION")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("ENUM")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+ - <Definition>
+ Kind: DefinitionKind("INPUT_OBJECT")
+ Name: "Foo"
+ Directives: [Directive]
+ - <Directive>
+ Name: "deprecated"
+
+
+inheritance:
+ - name: single
+ input: "type Hello implements World { field: String }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "World"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "field"
+ Type: String
+
+ - name: multi
+ input: "type Hello implements Wo & rld { field: String }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "Wo"
+ - "rld"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "field"
+ Type: String
+
+ - name: multi with leading amp
+ input: "type Hello implements & Wo & rld { field: String }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("OBJECT")
+ Name: "Hello"
+ Interfaces: [string]
+ - "Wo"
+ - "rld"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "field"
+ Type: String
+
+enums:
+ - name: single value
+ input: "enum Hello { WORLD }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("ENUM")
+ Name: "Hello"
+ EnumValues: [EnumValueDefinition]
+ - <EnumValueDefinition>
+ Name: "WORLD"
+
+ - name: double value
+ input: "enum Hello { WO, RLD }"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("ENUM")
+ Name: "Hello"
+ EnumValues: [EnumValueDefinition]
+ - <EnumValueDefinition>
+ Name: "WO"
+ - <EnumValueDefinition>
+ Name: "RLD"
+
+interface:
+ - name: simple
+ input: |
+ interface Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("INTERFACE")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+unions:
+ - name: simple
+ input: "union Hello = World"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("UNION")
+ Name: "Hello"
+ Types: [string]
+ - "World"
+
+ - name: with two types
+ input: "union Hello = Wo | Rld"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("UNION")
+ Name: "Hello"
+ Types: [string]
+ - "Wo"
+ - "Rld"
+
+ - name: with leading pipe
+ input: "union Hello = | Wo | Rld"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("UNION")
+ Name: "Hello"
+ Types: [string]
+ - "Wo"
+ - "Rld"
+
+ - name: cant be empty
+ input: "union Hello = || Wo | Rld"
+ error:
+ message: "Expected Name, found |"
+ locations: [{ line: 1, column: 16 }]
+
+ - name: cant double pipe
+ input: "union Hello = Wo || Rld"
+ error:
+ message: "Expected Name, found |"
+ locations: [{ line: 1, column: 19 }]
+
+ - name: cant have trailing pipe
+ input: "union Hello = | Wo | Rld |"
+ error:
+ message: "Expected Name, found <EOF>"
+ locations: [{ line: 1, column: 27 }]
+
+scalar:
+ - name: simple
+ input: "scalar Hello"
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("SCALAR")
+ Name: "Hello"
+
+input object:
+ - name: simple
+ input: |
+ input Hello {
+ world: String
+ }
+ ast: |
+ <SchemaDocument>
+ Definitions: [Definition]
+ - <Definition>
+ Kind: DefinitionKind("INPUT_OBJECT")
+ Name: "Hello"
+ Fields: [FieldDefinition]
+ - <FieldDefinition>
+ Name: "world"
+ Type: String
+
+ - name: can not have args
+ input: |
+ input Hello {
+ world(foo: Int): String
+ }
+ error:
+ message: "Expected :, found ("
+ locations: [{ line: 2, column: 8 }]
+
+directives:
+ - name: simple
+ input: directive @foo on FIELD
+ ast: |
+ <SchemaDocument>
+ Directives: [DirectiveDefinition]
+ - <DirectiveDefinition>
+ Name: "foo"
+ Locations: [DirectiveLocation]
+ - DirectiveLocation("FIELD")
+
+ - name: invalid location
+ input: "directive @foo on FIELD | INCORRECT_LOCATION"
+ error:
+ message: 'Unexpected Name "INCORRECT_LOCATION"'
+ locations: [{ line: 1, column: 27 }]
+
diff --git a/vendor/github.com/vektah/gqlparser/readme.md b/vendor/github.com/vektah/gqlparser/readme.md
new file mode 100644
index 00000000..976d202b
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/readme.md
@@ -0,0 +1,17 @@
+gqlparser [![CircleCI](https://badgen.net/circleci/github/vektah/gqlparser/master)](https://circleci.com/gh/vektah/gqlparser) [![Go Report Card](https://goreportcard.com/badge/github.com/vektah/gqlparser)](https://goreportcard.com/report/github.com/vektah/gqlparser) [![Coverage Status](https://badgen.net/coveralls/c/github/vektah/gqlparser)](https://coveralls.io/github/vektah/gqlparser?branch=master)
+===
+
+This is a parser for GraphQL, written to mirror the graphql-js reference implementation as closely as possible while remaining idiomatic and easy to use.
+
+spec target: June 2018 (Schema definition language, block strings as descriptions, error paths & extension)
+
+This parser is used by [gqlgen](https://github.com/99designs/gqlgen), and it should be reasonably stable.
+
+Guiding principles:
+
+ - maintainability: It should be easy to stay up to date with the spec
+ - well tested: It shouldn't need a GraphQL server to validate itself. Changes to this repo should be self-contained.
+ - server agnostic: It should be usable by any of the GraphQL server implementations, and any GraphQL client tooling.
+ - idiomatic & stable API: It should follow Go best practices, especially around forwards compatibility.
+ - fast: Where it doesn't impact the above, it should be fast. Avoid unnecessary allocs in hot paths.
+ - close to reference: Where it doesn't impact the above, it should stay close to the [graphql/graphql-js](https://github.com/graphql/graphql-js) reference implementation.
diff --git a/vendor/github.com/vektah/gqlparser/validator/error.go b/vendor/github.com/vektah/gqlparser/validator/error.go
new file mode 100644
index 00000000..f354dee5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/error.go
@@ -0,0 +1,55 @@
+package validator
+
+import (
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+type ErrorOption func(err *gqlerror.Error)
+
+func Message(msg string, args ...interface{}) ErrorOption {
+ return func(err *gqlerror.Error) {
+ err.Message += fmt.Sprintf(msg, args...)
+ }
+}
+
+func At(position *ast.Position) ErrorOption {
+ return func(err *gqlerror.Error) {
+ if position == nil {
+ return
+ }
+ err.Locations = append(err.Locations, gqlerror.Location{
+ Line: position.Line,
+ Column: position.Column,
+ })
+ if position.Src.Name != "" {
+ err.SetFile(position.Src.Name)
+ }
+ }
+}
+
+func SuggestListQuoted(prefix string, typed string, suggestions []string) ErrorOption {
+ suggested := SuggestionList(typed, suggestions)
+ return func(err *gqlerror.Error) {
+ if len(suggested) > 0 {
+ err.Message += " " + prefix + " " + QuotedOrList(suggested...) + "?"
+ }
+ }
+}
+
+func SuggestListUnquoted(prefix string, typed string, suggestions []string) ErrorOption {
+ suggested := SuggestionList(typed, suggestions)
+ return func(err *gqlerror.Error) {
+ if len(suggested) > 0 {
+ err.Message += " " + prefix + " " + OrList(suggested...) + "?"
+ }
+ }
+}
+
+func Suggestf(suggestion string, args ...interface{}) ErrorOption {
+ return func(err *gqlerror.Error) {
+ err.Message += " Did you mean " + fmt.Sprintf(suggestion, args...) + "?"
+ }
+}
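The ErrorOption helpers above are small composable builders that rules pass to an AddErrFunc. As a rough sketch (not part of the diff), this is how they apply to a gqlerror.Error; the message text and position values are invented for illustration:

```go
package main

import (
	"fmt"

	"github.com/vektah/gqlparser/ast"
	"github.com/vektah/gqlparser/gqlerror"
	"github.com/vektah/gqlparser/validator"
)

func main() {
	// Apply each ErrorOption in turn, mirroring what an AddErrFunc implementation does.
	err := &gqlerror.Error{}
	opts := []validator.ErrorOption{
		validator.Message(`Unknown type "%s".`, "Foo"),
		validator.At(&ast.Position{Line: 1, Column: 9, Src: &ast.Source{Name: "query.graphql"}}),
	}
	for _, opt := range opts {
		opt(err)
	}
	fmt.Println(err.Message)   // Unknown type "Foo".
	fmt.Println(err.Locations) // one location, line 1 column 9, taken from At
}
```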
diff --git a/vendor/github.com/vektah/gqlgen/neelance/tests/testdata/LICENSE b/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE
index fce4519e..fce4519e 100644
--- a/vendor/github.com/vektah/gqlgen/neelance/tests/testdata/LICENSE
+++ b/vendor/github.com/vektah/gqlparser/validator/imported/LICENSE
diff --git a/vendor/github.com/vektah/gqlparser/validator/messaging.go b/vendor/github.com/vektah/gqlparser/validator/messaging.go
new file mode 100644
index 00000000..f1ab5873
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/messaging.go
@@ -0,0 +1,39 @@
+package validator
+
+import "bytes"
+
+// Given [ A, B, C ] return '"A", "B", or "C"'.
+func QuotedOrList(items ...string) string {
+ itemsQuoted := make([]string, len(items))
+ for i, item := range items {
+ itemsQuoted[i] = `"` + item + `"`
+ }
+ return OrList(itemsQuoted...)
+}
+
+// Given [ A, B, C ] return 'A, B, or C'.
+func OrList(items ...string) string {
+ var buf bytes.Buffer
+
+ if len(items) > 5 {
+ items = items[:5]
+ }
+ if len(items) == 2 {
+ buf.WriteString(items[0])
+ buf.WriteString(" or ")
+ buf.WriteString(items[1])
+ return buf.String()
+ }
+
+ for i, item := range items {
+ if i != 0 {
+ if i == len(items)-1 {
+ buf.WriteString(", or ")
+ } else {
+ buf.WriteString(", ")
+ }
+ }
+ buf.WriteString(item)
+ }
+ return buf.String()
+}
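For reference, a minimal sketch of what these helpers return (illustrative values only):

```go
package main

import (
	"fmt"

	"github.com/vektah/gqlparser/validator"
)

func main() {
	// OrList joins at most five items with an Oxford "or";
	// QuotedOrList additionally wraps each item in double quotes.
	fmt.Println(validator.OrList("A", "B"))            // A or B
	fmt.Println(validator.OrList("A", "B", "C"))       // A, B, or C
	fmt.Println(validator.QuotedOrList("A", "B", "C")) // "A", "B", or "C"
}
```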
diff --git a/vendor/github.com/vektah/gqlparser/validator/prelude.go b/vendor/github.com/vektah/gqlparser/validator/prelude.go
new file mode 100644
index 00000000..80ce8a21
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/prelude.go
@@ -0,0 +1,5 @@
+package validator
+
+import "github.com/vektah/gqlparser/ast"
+
+var Prelude = &ast.Source{Name: "prelude.graphql", Input: "# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema\n\n# The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.\nscalar Int\n\n# The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).\nscalar Float\n\n# The `String`scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.\nscalar String\n\n# The `Boolean` scalar type represents ` + \"`\" + `true` + \"`\" + ` or ` + \"`\" + `false` + \"`\" + `.\nscalar Boolean\n\n# The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as \"4\") or integer (such as 4) input value will be accepted as an ID.\nscalar ID\n\n# The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument.\ndirective @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n# The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument.\ndirective @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n# The @deprecated directive is used within the type system definition language to indicate deprecated portions of a GraphQL service’s schema, such as deprecated fields on a type or deprecated enum values.\ndirective @deprecated(reason: String = \"No longer supported\") on FIELD_DEFINITION | ENUM_VALUE\n\ntype __Schema {\n types: [__Type!]!\n queryType: __Type!\n mutationType: __Type\n subscriptionType: __Type\n directives: [__Directive!]!\n}\n\ntype __Type {\n kind: __TypeKind!\n name: String\n description: String\n\n # OBJECT and INTERFACE only\n fields(includeDeprecated: Boolean = false): [__Field!]\n\n # OBJECT only\n interfaces: [__Type!]\n\n # INTERFACE and UNION only\n possibleTypes: [__Type!]\n\n # ENUM only\n enumValues(includeDeprecated: Boolean = false): [__EnumValue!]\n\n # INPUT_OBJECT only\n inputFields: [__InputValue!]\n\n # NON_NULL and LIST only\n ofType: __Type\n}\n\ntype __Field {\n name: String!\n description: String\n args: [__InputValue!]!\n type: __Type!\n isDeprecated: Boolean!\n deprecationReason: String\n}\n\ntype __InputValue {\n name: String!\n description: String\n type: __Type!\n defaultValue: String\n}\n\ntype __EnumValue {\n name: String!\n description: String\n isDeprecated: Boolean!\n deprecationReason: String\n}\n\nenum __TypeKind {\n SCALAR\n OBJECT\n INTERFACE\n UNION\n ENUM\n INPUT_OBJECT\n LIST\n NON_NULL\n}\n\ntype __Directive {\n name: String!\n description: String\n locations: [__DirectiveLocation!]!\n args: [__InputValue!]!\n}\n\nenum __DirectiveLocation {\n QUERY\n MUTATION\n SUBSCRIPTION\n FIELD\n FRAGMENT_DEFINITION\n FRAGMENT_SPREAD\n INLINE_FRAGMENT\n SCHEMA\n SCALAR\n OBJECT\n FIELD_DEFINITION\n ARGUMENT_DEFINITION\n INTERFACE\n UNION\n ENUM\n ENUM_VALUE\n INPUT_OBJECT\n INPUT_FIELD_DEFINITION\n}\n"}
diff --git a/vendor/github.com/vektah/gqlparser/validator/prelude.graphql b/vendor/github.com/vektah/gqlparser/validator/prelude.graphql
new file mode 100644
index 00000000..2c7f7c02
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/prelude.graphql
@@ -0,0 +1,119 @@
+# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema
+
+# The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+scalar Int
+
+# The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
+scalar Float
+
+# The `String`scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+scalar String
+
+# The `Boolean` scalar type represents ` + "`" + `true` + "`" + ` or ` + "`" + `false` + "`" + `.
+scalar Boolean
+
+# The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as "4") or integer (such as 4) input value will be accepted as an ID.
+scalar ID
+
+# The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument.
+directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
+
+# The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument.
+directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
+
+# The @deprecated directive is used within the type system definition language to indicate deprecated portions of a GraphQL service’s schema, such as deprecated fields on a type or deprecated enum values.
+directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ENUM_VALUE
+
+type __Schema {
+ types: [__Type!]!
+ queryType: __Type!
+ mutationType: __Type
+ subscriptionType: __Type
+ directives: [__Directive!]!
+}
+
+type __Type {
+ kind: __TypeKind!
+ name: String
+ description: String
+
+ # OBJECT and INTERFACE only
+ fields(includeDeprecated: Boolean = false): [__Field!]
+
+ # OBJECT only
+ interfaces: [__Type!]
+
+ # INTERFACE and UNION only
+ possibleTypes: [__Type!]
+
+ # ENUM only
+ enumValues(includeDeprecated: Boolean = false): [__EnumValue!]
+
+ # INPUT_OBJECT only
+ inputFields: [__InputValue!]
+
+ # NON_NULL and LIST only
+ ofType: __Type
+}
+
+type __Field {
+ name: String!
+ description: String
+ args: [__InputValue!]!
+ type: __Type!
+ isDeprecated: Boolean!
+ deprecationReason: String
+}
+
+type __InputValue {
+ name: String!
+ description: String
+ type: __Type!
+ defaultValue: String
+}
+
+type __EnumValue {
+ name: String!
+ description: String
+ isDeprecated: Boolean!
+ deprecationReason: String
+}
+
+enum __TypeKind {
+ SCALAR
+ OBJECT
+ INTERFACE
+ UNION
+ ENUM
+ INPUT_OBJECT
+ LIST
+ NON_NULL
+}
+
+type __Directive {
+ name: String!
+ description: String
+ locations: [__DirectiveLocation!]!
+ args: [__InputValue!]!
+}
+
+enum __DirectiveLocation {
+ QUERY
+ MUTATION
+ SUBSCRIPTION
+ FIELD
+ FRAGMENT_DEFINITION
+ FRAGMENT_SPREAD
+ INLINE_FRAGMENT
+ SCHEMA
+ SCALAR
+ OBJECT
+ FIELD_DEFINITION
+ ARGUMENT_DEFINITION
+ INTERFACE
+ UNION
+ ENUM
+ ENUM_VALUE
+ INPUT_OBJECT
+ INPUT_FIELD_DEFINITION
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
new file mode 100644
index 00000000..69148d52
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/fields_on_correct_type.go
@@ -0,0 +1,86 @@
+package validator
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("FieldsOnCorrectType", func(observers *Events, addError AddErrFunc) {
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if field.ObjectDefinition == nil || field.Definition != nil {
+ return
+ }
+
+ message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name)
+
+ if suggestedTypeNames := getSuggestedTypeNames(walker, field.ObjectDefinition, field.Name); suggestedTypeNames != nil {
+ message += " Did you mean to use an inline fragment on " + QuotedOrList(suggestedTypeNames...) + "?"
+ } else if suggestedFieldNames := getSuggestedFieldNames(field.ObjectDefinition, field.Name); suggestedFieldNames != nil {
+ message += " Did you mean " + QuotedOrList(suggestedFieldNames...) + "?"
+ }
+
+ addError(
+ Message(message),
+ At(field.Position),
+ )
+ })
+ })
+}
+
+// Go through all of the implementations of the type, as well as the interfaces
+// that they implement. If any of those types include the provided field,
+// suggest them, sorted by how often the type is referenced, starting
+// with interfaces.
+func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) []string {
+ if !parent.IsAbstractType() {
+ return nil
+ }
+
+ var suggestedObjectTypes []string
+ var suggestedInterfaceTypes []string
+ interfaceUsageCount := map[string]int{}
+
+ for _, possibleType := range walker.Schema.GetPossibleTypes(parent) {
+ field := possibleType.Fields.ForName(name)
+ if field == nil {
+ continue
+ }
+
+ suggestedObjectTypes = append(suggestedObjectTypes, possibleType.Name)
+
+ for _, possibleInterface := range possibleType.Interfaces {
+ interfaceField := walker.Schema.Types[possibleInterface]
+ if interfaceField != nil && interfaceField.Fields.ForName(name) != nil {
+ if interfaceUsageCount[possibleInterface] == 0 {
+ suggestedInterfaceTypes = append(suggestedInterfaceTypes, possibleInterface)
+ }
+ interfaceUsageCount[possibleInterface]++
+ }
+ }
+ }
+
+ sort.SliceStable(suggestedInterfaceTypes, func(i, j int) bool {
+ return interfaceUsageCount[suggestedInterfaceTypes[i]] > interfaceUsageCount[suggestedInterfaceTypes[j]]
+ })
+
+ return append(suggestedInterfaceTypes, suggestedObjectTypes...)
+}
+
+// For the field name provided, determine if there are any similar field names
+// that may be the result of a typo.
+func getSuggestedFieldNames(parent *ast.Definition, name string) []string {
+ if parent.Kind != ast.Object && parent.Kind != ast.Interface {
+ return nil
+ }
+
+ var possibleFieldNames []string
+ for _, field := range parent.Fields {
+ possibleFieldNames = append(possibleFieldNames, field.Name)
+ }
+
+ return SuggestionList(name, possibleFieldNames)
+}
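To ground the suggestion logic above, a hedged sketch of the kind of query this rule flags. The User type and its name field are hypothetical, and the suggestion only appears when "name" falls within SuggestionList's distance threshold:

```go
// Sketch only: a misspelled field against a hypothetical schema
//   type Query { user: User }
//   type User { name: String }
const misspelledFieldQuery = `{ user { nme } }`

// Expected error from FieldsOnCorrectType:
//   Cannot query field "nme" on type "User". Did you mean "name"?
```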
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
new file mode 100644
index 00000000..a4a48246
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/fragments_on_composite_types.go
@@ -0,0 +1,39 @@
+package validator
+
+import (
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("FragmentsOnCompositeTypes", func(observers *Events, addError AddErrFunc) {
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ fragmentType := walker.Schema.Types[inlineFragment.TypeCondition]
+ if fragmentType == nil || fragmentType.IsCompositeType() {
+ return
+ }
+
+ message := fmt.Sprintf(`Fragment cannot condition on non composite type "%s".`, inlineFragment.TypeCondition)
+
+ addError(
+ Message(message),
+ At(inlineFragment.Position),
+ )
+ })
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() {
+ return
+ }
+
+ message := fmt.Sprintf(`Fragment "%s" cannot condition on non composite type "%s".`, fragment.Name, fragment.TypeCondition)
+
+ addError(
+ Message(message),
+ At(fragment.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
new file mode 100644
index 00000000..83b47387
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_argument_names.go
@@ -0,0 +1,57 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) {
+ // A GraphQL field is only valid if all supplied arguments are defined by that field.
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if field.Definition == nil {
+ return
+ }
+ for _, arg := range field.Arguments {
+ def := field.Definition.Arguments.ForName(arg.Name)
+ if def != nil {
+ continue
+ }
+
+ var suggestions []string
+ for _, argDef := range field.Definition.Arguments {
+ suggestions = append(suggestions, argDef.Name)
+ }
+
+ addError(
+ Message(`Unknown argument "%s" on field "%s" of type "%s".`, arg.Name, field.Name, field.ObjectDefinition.Name),
+ SuggestListQuoted("Did you mean", arg.Name, suggestions),
+ At(field.Position),
+ )
+ }
+ })
+
+ observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+ if directive.Definition == nil {
+ return
+ }
+ for _, arg := range directive.Arguments {
+ def := directive.Definition.Arguments.ForName(arg.Name)
+ if def != nil {
+ continue
+ }
+
+ var suggestions []string
+ for _, argDef := range directive.Definition.Arguments {
+ suggestions = append(suggestions, argDef.Name)
+ }
+
+ addError(
+ Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name),
+ SuggestListQuoted("Did you mean", arg.Name, suggestions),
+ At(directive.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
new file mode 100644
index 00000000..dc4353ef
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_directives.go
@@ -0,0 +1,31 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownDirectives", func(observers *Events, addError AddErrFunc) {
+ observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+ if directive.Definition == nil {
+ addError(
+ Message(`Unknown directive "%s".`, directive.Name),
+ At(directive.Position),
+ )
+ return
+ }
+
+ for _, loc := range directive.Definition.Locations {
+ if loc == directive.Location {
+ return
+ }
+ }
+
+ addError(
+ Message(`Directive "%s" may not be used on %s.`, directive.Name, directive.Location),
+ At(directive.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
new file mode 100644
index 00000000..ec91588c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_fragment_names.go
@@ -0,0 +1,19 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+ if fragmentSpread.Definition == nil {
+ addError(
+ Message(`Unknown fragment "%s".`, fragmentSpread.Name),
+ At(fragmentSpread.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
new file mode 100644
index 00000000..223086b3
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/known_type_names.go
@@ -0,0 +1,61 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("KnownTypeNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ for _, vdef := range operation.VariableDefinitions {
+ typeName := vdef.Type.Name()
+ def := walker.Schema.Types[typeName]
+ if def != nil {
+ continue
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typeName),
+ At(operation.Position),
+ )
+ }
+ })
+
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ typedName := inlineFragment.TypeCondition
+ if typedName == "" {
+ return
+ }
+
+ def := walker.Schema.Types[typedName]
+ if def != nil {
+ return
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typedName),
+ At(inlineFragment.Position),
+ )
+ })
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ typeName := fragment.TypeCondition
+ def := walker.Schema.Types[typeName]
+ if def != nil {
+ return
+ }
+
+ var possibleTypes []string
+ for _, t := range walker.Schema.Types {
+ possibleTypes = append(possibleTypes, t.Name)
+ }
+
+ addError(
+ Message(`Unknown type "%s".`, typeName),
+ SuggestListQuoted("Did you mean", typeName, possibleTypes),
+ At(fragment.Position),
+ )
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go b/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
new file mode 100644
index 00000000..dd232142
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/lone_anonymous_operation.go
@@ -0,0 +1,19 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("LoneAnonymousOperation", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ if operation.Name == "" && len(walker.Document.Operations) > 1 {
+ addError(
+ Message(`This anonymous operation must be the only defined operation.`),
+ At(operation.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
new file mode 100644
index 00000000..7511529b
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_fragment_cycles.go
@@ -0,0 +1,93 @@
+package validator
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("NoFragmentCycles", func(observers *Events, addError AddErrFunc) {
+ visitedFrags := make(map[string]bool)
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ var spreadPath []*ast.FragmentSpread
+ spreadPathIndexByName := make(map[string]int)
+
+ var recursive func(fragment *ast.FragmentDefinition)
+ recursive = func(fragment *ast.FragmentDefinition) {
+ if visitedFrags[fragment.Name] {
+ return
+ }
+
+ visitedFrags[fragment.Name] = true
+
+ spreadNodes := getFragmentSpreads(fragment.SelectionSet)
+ if len(spreadNodes) == 0 {
+ return
+ }
+ spreadPathIndexByName[fragment.Name] = len(spreadPath)
+
+ for _, spreadNode := range spreadNodes {
+ spreadName := spreadNode.Name
+
+ cycleIndex, ok := spreadPathIndexByName[spreadName]
+
+ spreadPath = append(spreadPath, spreadNode)
+ if !ok {
+ spreadFragment := walker.Document.Fragments.ForName(spreadName)
+ if spreadFragment != nil {
+ recursive(spreadFragment)
+ }
+ } else {
+ cyclePath := spreadPath[cycleIndex : len(spreadPath)-1]
+ var fragmentNames []string
+ for _, fs := range cyclePath {
+ fragmentNames = append(fragmentNames, fs.Name)
+ }
+ var via string
+ if len(fragmentNames) != 0 {
+ via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", "))
+ }
+ addError(
+ Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via),
+ At(spreadNode.Position),
+ )
+ }
+
+ spreadPath = spreadPath[:len(spreadPath)-1]
+ }
+
+ delete(spreadPathIndexByName, fragment.Name)
+ }
+
+ recursive(fragment)
+ })
+ })
+}
+
+func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread {
+ var spreads []*ast.FragmentSpread
+
+ setsToVisit := []ast.SelectionSet{node}
+
+ for len(setsToVisit) != 0 {
+ set := setsToVisit[len(setsToVisit)-1]
+ setsToVisit = setsToVisit[:len(setsToVisit)-1]
+
+ for _, selection := range set {
+ switch selection := selection.(type) {
+ case *ast.FragmentSpread:
+ spreads = append(spreads, selection)
+ case *ast.Field:
+ setsToVisit = append(setsToVisit, selection.SelectionSet)
+ case *ast.InlineFragment:
+ setsToVisit = append(setsToVisit, selection.SelectionSet)
+ }
+ }
+ }
+
+ return spreads
+}
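A concrete (hedged) example of what the spreadPath bookkeeping above detects; the Character type and friends field are invented for illustration:

```go
// Sketch only: FragA spreads FragB, which spreads FragA again, so the
// recursive walk finds "FragA" already in spreadPathIndexByName and
// reports the cycle, naming the intermediate fragment in the "via" list.
const cyclicFragmentsQuery = `
query { hero { ...FragA } }
fragment FragA on Character { friends { ...FragB } }
fragment FragB on Character { friends { ...FragA } }
`

// Expected error from NoFragmentCycles:
//   Cannot spread fragment "FragA" within itself via FragB.
```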
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
new file mode 100644
index 00000000..505206be
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_undefined_variables.go
@@ -0,0 +1,28 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("NoUndefinedVariables", func(observers *Events, addError AddErrFunc) {
+ observers.OnValue(func(walker *Walker, value *ast.Value) {
+ if walker.CurrentOperation == nil || value.Kind != ast.Variable || value.VariableDefinition != nil {
+ return
+ }
+
+ if walker.CurrentOperation.Name != "" {
+ addError(
+ Message(`Variable "%s" is not defined by operation "%s".`, value, walker.CurrentOperation.Name),
+ At(walker.CurrentOperation.Position),
+ )
+ } else {
+ addError(
+ Message(`Variable "%s" is not defined.`, value),
+ At(value.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
new file mode 100644
index 00000000..4aa835f5
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_fragments.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) {
+
+ inFragmentDefinition := false
+ fragmentNameUsed := make(map[string]bool)
+
+ observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+ if !inFragmentDefinition {
+ fragmentNameUsed[fragmentSpread.Name] = true
+ }
+ })
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ inFragmentDefinition = true
+ if !fragmentNameUsed[fragment.Name] {
+ addError(
+ Message(`Fragment "%s" is never used.`, fragment.Name),
+ At(fragment.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
new file mode 100644
index 00000000..28cf7736
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/no_unused_variables.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ for _, varDef := range operation.VariableDefinitions {
+ if varDef.Used {
+ continue
+ }
+
+ if operation.Name != "" {
+ addError(
+ Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name),
+ At(varDef.Position),
+ )
+ } else {
+ addError(
+ Message(`Variable "$%s" is never used.`, varDef.Variable),
+ At(varDef.Position),
+ )
+ }
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go b/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
new file mode 100644
index 00000000..52eab3a2
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/overlapping_fields_can_be_merged.go
@@ -0,0 +1,553 @@
+package validator
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+
+ AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) {
+ /**
+ * Algorithm:
+ *
+ * Conflicts occur when two fields exist in a query which will produce the same
+ * response name, but represent differing values, thus creating a conflict.
+ * The algorithm below finds all conflicts via making a series of comparisons
+ * between fields. In order to compare as few fields as possible, this makes
+ * a series of comparisons "within" sets of fields and "between" sets of fields.
+ *
+ * Given any selection set, collecting its fields produces both a set of
+ * fields (including those inside inline fragments) and a list of fragments
+ * referenced by fragment spreads.
+ *
+ * A) Each selection set represented in the document first compares "within" its
+ * collected set of fields, finding any conflicts between every pair of
+ * overlapping fields.
+ * Note: This is the *only time* that the fields "within" a set are compared
+ * to each other. After this only fields "between" sets are compared.
+ *
+ * B) Also, if any fragment is referenced in a selection set, then a
+ * comparison is made "between" the original set of fields and the
+ * referenced fragment.
+ *
+ * C) Also, if multiple fragments are referenced, then comparisons
+ * are made "between" each referenced fragment.
+ *
+ * D) When comparing "between" a set of fields and a referenced fragment, first
+ * a comparison is made between each field in the original set of fields and
+ * each field in the referenced set of fields.
+ *
+ * E) Also, if any fragment is referenced in the referenced selection set,
+ * then a comparison is made "between" the original set of fields and the
+ * referenced fragment (recursively referring to step D).
+ *
+ * F) When comparing "between" two fragments, first a comparison is made between
+ * each field in the first referenced set of fields and each field in the
+ * second referenced set of fields.
+ *
+ * G) Also, any fragments referenced by the first must be compared to the
+ * second, and any fragments referenced by the second must be compared to the
+ * first (recursively referring to step F).
+ *
+ * H) When comparing two fields, if both have selection sets, then a comparison
+ * is made "between" both selection sets, first comparing the set of fields in
+ * the first selection set with the set of fields in the second.
+ *
+ * I) Also, if any fragment is referenced in either selection set, then a
+ * comparison is made "between" the other set of fields and the
+ * referenced fragment.
+ *
+ * J) Also, if two fragments are referenced in both selection sets, then a
+ * comparison is made "between" the two fragments.
+ *
+ */
+
+ m := &overlappingFieldsCanBeMergedManager{
+ comparedFragmentPairs: pairSet{data: make(map[string]map[string]bool)},
+ }
+
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ m.walker = walker
+ conflicts := m.findConflictsWithinSelectionSet(operation.SelectionSet)
+ for _, conflict := range conflicts {
+ conflict.addFieldsConflictMessage(addError)
+ }
+ })
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if walker.CurrentOperation == nil {
+ // When checking both Operation and Fragment, errors are duplicated when processing FragmentDefinition referenced from Operation
+ return
+ }
+ m.walker = walker
+ conflicts := m.findConflictsWithinSelectionSet(field.SelectionSet)
+ for _, conflict := range conflicts {
+ conflict.addFieldsConflictMessage(addError)
+ }
+ })
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ m.walker = walker
+ conflicts := m.findConflictsWithinSelectionSet(inlineFragment.SelectionSet)
+ for _, conflict := range conflicts {
+ conflict.addFieldsConflictMessage(addError)
+ }
+ })
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ m.walker = walker
+ conflicts := m.findConflictsWithinSelectionSet(fragment.SelectionSet)
+ for _, conflict := range conflicts {
+ conflict.addFieldsConflictMessage(addError)
+ }
+ })
+ })
+}
+
+type pairSet struct {
+ data map[string]map[string]bool
+}
+
+func (pairSet *pairSet) Add(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) {
+ add := func(a *ast.FragmentSpread, b *ast.FragmentSpread) {
+ m := pairSet.data[a.Name]
+ if m == nil {
+ m = make(map[string]bool)
+ pairSet.data[a.Name] = m
+ }
+ m[b.Name] = areMutuallyExclusive
+ }
+ add(a, b)
+ add(b, a)
+}
+
+func (pairSet *pairSet) Has(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) bool {
+ am, ok := pairSet.data[a.Name]
+ if !ok {
+ return false
+ }
+ result, ok := am[b.Name]
+ if !ok {
+ return false
+ }
+
+ // areMutuallyExclusive being false is a superset of being true,
+ // hence if we want to know if this PairSet "has" these two with no
+ // exclusivity, we have to ensure it was added as such.
+ if !areMutuallyExclusive {
+ return !result
+ }
+
+ return true
+}
+
+type sequentialFieldsMap struct {
+ // We can't use map[string][]*ast.Field because map iteration order is not stable...
+ seq []string
+ data map[string][]*ast.Field
+}
+
+type fieldIterateEntry struct {
+ ResponseName string
+ Fields []*ast.Field
+}
+
+func (m *sequentialFieldsMap) Push(responseName string, field *ast.Field) {
+ fields, ok := m.data[responseName]
+ if !ok {
+ m.seq = append(m.seq, responseName)
+ }
+ fields = append(fields, field)
+ m.data[responseName] = fields
+}
+
+func (m *sequentialFieldsMap) Get(responseName string) ([]*ast.Field, bool) {
+ fields, ok := m.data[responseName]
+ return fields, ok
+}
+
+func (m *sequentialFieldsMap) Iterator() [][]*ast.Field {
+ fieldsList := make([][]*ast.Field, 0, len(m.seq))
+ for _, responseName := range m.seq {
+ fields := m.data[responseName]
+ fieldsList = append(fieldsList, fields)
+ }
+ return fieldsList
+}
+
+func (m *sequentialFieldsMap) KeyValueIterator() []*fieldIterateEntry {
+ fieldEntriesList := make([]*fieldIterateEntry, 0, len(m.seq))
+ for _, responseName := range m.seq {
+ fields := m.data[responseName]
+ fieldEntriesList = append(fieldEntriesList, &fieldIterateEntry{
+ ResponseName: responseName,
+ Fields: fields,
+ })
+ }
+ return fieldEntriesList
+}
+
+type conflictMessageContainer struct {
+ Conflicts []*ConflictMessage
+}
+
+type ConflictMessage struct {
+ Message string
+ ResponseName string
+ Names []string
+ SubMessage []*ConflictMessage
+ Position *ast.Position
+}
+
+func (m *ConflictMessage) String(buf *bytes.Buffer) {
+ if len(m.SubMessage) == 0 {
+ buf.WriteString(m.Message)
+ return
+ }
+
+ for idx, subMessage := range m.SubMessage {
+ buf.WriteString(`subfields "`)
+ buf.WriteString(subMessage.ResponseName)
+ buf.WriteString(`" conflict because `)
+ subMessage.String(buf)
+ if idx != len(m.SubMessage)-1 {
+ buf.WriteString(" and ")
+ }
+ }
+}
+
+func (m *ConflictMessage) addFieldsConflictMessage(addError AddErrFunc) {
+ var buf bytes.Buffer
+ m.String(&buf)
+ addError(
+ Message(`Fields "%s" conflict because %s. Use different aliases on the fields to fetch both if this was intentional.`, m.ResponseName, buf.String()),
+ At(m.Position),
+ )
+}
+
+type overlappingFieldsCanBeMergedManager struct {
+ walker *Walker
+
+ // per walker
+ comparedFragmentPairs pairSet
+ // cachedFieldsAndFragmentNames interface{}
+
+ // per selectionSet
+ comparedFragments map[string]bool
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflictsWithinSelectionSet(selectionSet ast.SelectionSet) []*ConflictMessage {
+ if len(selectionSet) == 0 {
+ return nil
+ }
+
+ fieldsMap, fragmentSpreads := getFieldsAndFragmentNames(selectionSet)
+
+ var conflicts conflictMessageContainer
+
+ // (A) Find all conflicts "within" the fieldMap of this selection set.
+ // Note: this is the *only place* `collectConflictsWithin` is called.
+ m.collectConflictsWithin(&conflicts, fieldsMap)
+
+ m.comparedFragments = make(map[string]bool)
+ for idx, fragmentSpreadA := range fragmentSpreads {
+ // (B) Then collect conflicts between this fieldMap and those represented by
+ // each spread fragment name found.
+ m.collectConflictsBetweenFieldsAndFragment(&conflicts, false, fieldsMap, fragmentSpreadA)
+
+ for _, fragmentSpreadB := range fragmentSpreads[idx+1:] {
+ // (C) Then compare this fragment with all other fragments found in this
+ // selection set to collect conflicts between fragments spread together.
+ // This compares each item in the list of fragment names to every other
+ // item in that same list (except for itself).
+ m.collectConflictsBetweenFragments(&conflicts, false, fragmentSpreadA, fragmentSpreadB)
+ }
+ }
+
+ return conflicts.Conflicts
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFragment(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fieldsMap *sequentialFieldsMap, fragmentSpread *ast.FragmentSpread) {
+ if m.comparedFragments[fragmentSpread.Name] {
+ return
+ }
+ m.comparedFragments[fragmentSpread.Name] = true
+
+ if fragmentSpread.Definition == nil {
+ return
+ }
+
+ fieldsMapB, fragmentSpreads := getFieldsAndFragmentNames(fragmentSpread.Definition.SelectionSet)
+
+ // Do not compare a fragment's fieldMap to itself.
+ if reflect.DeepEqual(fieldsMap, fieldsMapB) {
+ return
+ }
+
+ // (D) First collect any conflicts between the provided collection of fields
+ // and the collection of fields represented by the given fragment.
+ m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMap, fieldsMapB)
+
+ // (E) Then collect any conflicts between the provided collection of fields
+ // and any fragment names found in the given fragment.
+ for _, fragmentSpread := range fragmentSpreads {
+ m.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsMap, fragmentSpread)
+ }
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
+
+ var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread)
+ check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
+
+ if fragmentSpreadA.Name == fragmentSpreadB.Name {
+ return
+ }
+
+ if m.comparedFragmentPairs.Has(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive) {
+ return
+ }
+ m.comparedFragmentPairs.Add(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive)
+
+ if fragmentSpreadA.Definition == nil {
+ return
+ }
+ if fragmentSpreadB.Definition == nil {
+ return
+ }
+
+ fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(fragmentSpreadA.Definition.SelectionSet)
+ fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(fragmentSpreadB.Definition.SelectionSet)
+
+ // (F) First, collect all conflicts between these two collections of fields
+ // (not including any nested fragments).
+ m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
+
+ // (G) Then collect conflicts between the first fragment and any nested
+ // fragments spread in the second fragment.
+ for _, fragmentSpread := range fragmentSpreadsB {
+ check(fragmentSpreadA, fragmentSpread)
+ }
+ // (G) Then collect conflicts between the second fragment and any nested
+ // fragments spread in the first fragment.
+ for _, fragmentSpread := range fragmentSpreadsA {
+ check(fragmentSpread, fragmentSpreadB)
+ }
+ }
+
+ check(fragmentSpreadA, fragmentSpreadB)
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflictsBetweenSubSelectionSets(areMutuallyExclusive bool, selectionSetA ast.SelectionSet, selectionSetB ast.SelectionSet) *conflictMessageContainer {
+ var conflicts conflictMessageContainer
+
+ fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(selectionSetA)
+ fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(selectionSetB)
+
+ // (H) First, collect all conflicts between these two collections of field.
+ m.collectConflictsBetween(&conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
+
+ // (I) Then collect conflicts between the first collection of fields and
+ // those referenced by each fragment name associated with the second.
+ for _, fragmentSpread := range fragmentSpreadsB {
+ m.comparedFragments = make(map[string]bool)
+ m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapA, fragmentSpread)
+ }
+
+ // (I) Then collect conflicts between the second collection of fields and
+ // those referenced by each fragment name associated with the first.
+ for _, fragmentSpread := range fragmentSpreadsA {
+ m.comparedFragments = make(map[string]bool)
+ m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapB, fragmentSpread)
+ }
+
+ // (J) Also collect conflicts between any fragment names by the first and
+ // fragment names by the second. This compares each item in the first set of
+ // names to each item in the second set of names.
+ for _, fragmentSpreadA := range fragmentSpreadsA {
+ for _, fragmentSpreadB := range fragmentSpreadsB {
+ m.collectConflictsBetweenFragments(&conflicts, areMutuallyExclusive, fragmentSpreadA, fragmentSpreadB)
+ }
+ }
+
+ if len(conflicts.Conflicts) == 0 {
+ return nil
+ }
+
+ return &conflicts
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsWithin(conflicts *conflictMessageContainer, fieldsMap *sequentialFieldsMap) {
+ for _, fields := range fieldsMap.Iterator() {
+ for idx, fieldA := range fields {
+ for _, fieldB := range fields[idx+1:] {
+ conflict := m.findConflict(false, fieldA, fieldB)
+ if conflict != nil {
+ conflicts.Conflicts = append(conflicts.Conflicts, conflict)
+ }
+ }
+ }
+ }
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetween(conflicts *conflictMessageContainer, parentFieldsAreMutuallyExclusive bool, fieldsMapA *sequentialFieldsMap, fieldsMapB *sequentialFieldsMap) {
+ for _, fieldsEntryA := range fieldsMapA.KeyValueIterator() {
+ fieldsB, ok := fieldsMapB.Get(fieldsEntryA.ResponseName)
+ if !ok {
+ continue
+ }
+ for _, fieldA := range fieldsEntryA.Fields {
+ for _, fieldB := range fieldsB {
+ conflict := m.findConflict(parentFieldsAreMutuallyExclusive, fieldA, fieldB)
+ if conflict != nil {
+ conflicts.Conflicts = append(conflicts.Conflicts, conflict)
+ }
+ }
+ }
+ }
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflict(parentFieldsAreMutuallyExclusive bool, fieldA *ast.Field, fieldB *ast.Field) *ConflictMessage {
+ if fieldA.Definition == nil || fieldA.ObjectDefinition == nil || fieldB.Definition == nil || fieldB.ObjectDefinition == nil {
+ return nil
+ }
+
+ areMutuallyExclusive := parentFieldsAreMutuallyExclusive
+ if !areMutuallyExclusive {
+ tmp := fieldA.ObjectDefinition.Name != fieldB.ObjectDefinition.Name
+ tmp = tmp && fieldA.ObjectDefinition.Kind == ast.Object
+ tmp = tmp && fieldB.ObjectDefinition.Kind == ast.Object
+ areMutuallyExclusive = tmp
+ }
+
+ fieldNameA := fieldA.Name
+ if fieldA.Alias != "" {
+ fieldNameA = fieldA.Alias
+ }
+
+ if !areMutuallyExclusive {
+ // Two aliases must refer to the same field.
+ if fieldA.Name != fieldB.Name {
+ return &ConflictMessage{
+ ResponseName: fieldNameA,
+ Message: fmt.Sprintf(`%s and %s are different fields`, fieldA.Name, fieldB.Name),
+ Position: fieldB.Position,
+ }
+ }
+
+ // Two field calls must have the same arguments.
+ if !sameArguments(fieldA.Arguments, fieldB.Arguments) {
+ return &ConflictMessage{
+ ResponseName: fieldNameA,
+ Message: "they have differing arguments",
+ Position: fieldB.Position,
+ }
+ }
+ }
+
+ if doTypesConflict(m.walker, fieldA.Definition.Type, fieldB.Definition.Type) {
+ return &ConflictMessage{
+ ResponseName: fieldNameA,
+ Message: fmt.Sprintf(`they return conflicting types %s and %s`, fieldA.Definition.Type.String(), fieldB.Definition.Type.String()),
+ Position: fieldB.Position,
+ }
+ }
+
+ // Collect and compare sub-fields. Use the same "visited fragment names" list
+ // for both collections so fields in a fragment reference are never
+ // compared to themselves.
+ conflicts := m.findConflictsBetweenSubSelectionSets(areMutuallyExclusive, fieldA.SelectionSet, fieldB.SelectionSet)
+ if conflicts == nil {
+ return nil
+ }
+ return &ConflictMessage{
+ ResponseName: fieldNameA,
+ SubMessage: conflicts.Conflicts,
+ Position: fieldB.Position,
+ }
+}
+
+func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool {
+ if len(args1) != len(args2) {
+ return false
+ }
+ for _, arg1 := range args1 {
+ for _, arg2 := range args2 {
+ if arg1.Name != arg2.Name {
+ return false
+ }
+ if !sameValue(arg1.Value, arg2.Value) {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+func sameValue(value1 *ast.Value, value2 *ast.Value) bool {
+ if value1.Kind != value2.Kind {
+ return false
+ }
+ if value1.Raw != value2.Raw {
+ return false
+ }
+ return true
+}
+
+func doTypesConflict(walker *Walker, type1 *ast.Type, type2 *ast.Type) bool {
+ if type1.Elem != nil {
+ if type2.Elem != nil {
+ return doTypesConflict(walker, type1.Elem, type2.Elem)
+ }
+ return true
+ }
+ if type2.Elem != nil {
+ return true
+ }
+ if type1.NonNull && !type2.NonNull {
+ return true
+ }
+ if !type1.NonNull && type2.NonNull {
+ return true
+ }
+
+ t1 := walker.Schema.Types[type1.NamedType]
+ t2 := walker.Schema.Types[type2.NamedType]
+ if (t1.Kind == ast.Scalar || t1.Kind == ast.Enum) && (t2.Kind == ast.Scalar || t2.Kind == ast.Enum) {
+ return t1.Name != t2.Name
+ }
+
+ return false
+}
+
+func getFieldsAndFragmentNames(selectionSet ast.SelectionSet) (*sequentialFieldsMap, []*ast.FragmentSpread) {
+ fieldsMap := sequentialFieldsMap{
+ data: make(map[string][]*ast.Field),
+ }
+ var fragmentSpreads []*ast.FragmentSpread
+
+ var walk func(selectionSet ast.SelectionSet)
+ walk = func(selectionSet ast.SelectionSet) {
+ for _, selection := range selectionSet {
+ switch selection := selection.(type) {
+ case *ast.Field:
+ responseName := selection.Name
+ if selection.Alias != "" {
+ responseName = selection.Alias
+ }
+ fieldsMap.Push(responseName, selection)
+
+ case *ast.InlineFragment:
+ walk(selection.SelectionSet)
+
+ case *ast.FragmentSpread:
+ fragmentSpreads = append(fragmentSpreads, selection)
+ }
+ }
+ }
+ walk(selectionSet)
+
+ return &fieldsMap, fragmentSpreads
+}
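As a concrete illustration of the algorithm described at the top of this file, a minimal selection that the rule rejects, assuming a hypothetical User type that defines both nick and surname: the two selections share the response name "name" but alias different fields, so findConflict reports them as unmergeable.

```go
// Sketch only: both selections respond under "name", but they alias two
// different fields of the same object type, so they cannot be merged.
const conflictingFieldsQuery = `
{
  user {
    name: nick
    name: surname
  }
}
`

// Expected error from OverlappingFieldsCanBeMerged:
//   Fields "name" conflict because nick and surname are different fields.
//   Use different aliases on the fields to fetch both if this was intentional.
```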
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go b/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
new file mode 100644
index 00000000..971decbf
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/possible_fragment_spreads.go
@@ -0,0 +1,68 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) {
+
+ validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) {
+ if parentDef == nil {
+ return
+ }
+
+ var parentDefs []*ast.Definition
+ switch parentDef.Kind {
+ case ast.Object:
+ parentDefs = []*ast.Definition{parentDef}
+ case ast.Interface, ast.Union:
+ parentDefs = walker.Schema.GetPossibleTypes(parentDef)
+ default:
+ panic("unexpected type")
+ }
+
+ fragmentDefType := walker.Schema.Types[fragmentName]
+ if fragmentDefType == nil {
+ return
+ }
+ if !fragmentDefType.IsCompositeType() {
+ // checked by FragmentsOnCompositeTypes
+ return
+ }
+ fragmentDefs := walker.Schema.GetPossibleTypes(fragmentDefType)
+
+ for _, fragmentDef := range fragmentDefs {
+ for _, parentDef := range parentDefs {
+ if parentDef.Name == fragmentDef.Name {
+ return
+ }
+ }
+ }
+
+ emitError()
+ }
+
+ observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+ validate(walker, inlineFragment.ObjectDefinition, inlineFragment.TypeCondition, func() {
+ addError(
+ Message(`Fragment cannot be spread here as objects of type "%s" can never be of type "%s".`, inlineFragment.ObjectDefinition.Name, inlineFragment.TypeCondition),
+ At(inlineFragment.Position),
+ )
+ })
+ })
+
+ observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
+ if fragmentSpread.Definition == nil {
+ return
+ }
+ validate(walker, fragmentSpread.ObjectDefinition, fragmentSpread.Definition.TypeCondition, func() {
+ addError(
+ Message(`Fragment "%s" cannot be spread here as objects of type "%s" can never be of type "%s".`, fragmentSpread.Name, fragmentSpread.ObjectDefinition.Name, fragmentSpread.Definition.TypeCondition),
+ At(fragmentSpread.Position),
+ )
+ })
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go b/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
new file mode 100644
index 00000000..55791a6b
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/provided_required_arguments.go
@@ -0,0 +1,63 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) {
+
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if field.Definition == nil {
+ return
+ }
+
+ argDef:
+ for _, argDef := range field.Definition.Arguments {
+ if !argDef.Type.NonNull {
+ continue
+ }
+ if argDef.DefaultValue != nil {
+ continue
+ }
+ for _, arg := range field.Arguments {
+ if arg.Name == argDef.Name {
+ continue argDef
+ }
+ }
+
+ addError(
+ Message(`Field "%s" argument "%s" of type "%s" is required but not provided.`, field.Name, argDef.Name, argDef.Type.String()),
+ At(field.Position),
+ )
+ }
+ })
+
+ observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+ if directive.Definition == nil {
+ return
+ }
+
+ argDef:
+ for _, argDef := range directive.Definition.Arguments {
+ if !argDef.Type.NonNull {
+ continue
+ }
+ if argDef.DefaultValue != nil {
+ continue
+ }
+ for _, arg := range directive.Arguments {
+ if arg.Name == argDef.Name {
+ continue argDef
+ }
+ }
+
+ addError(
+ Message(`Directive "@%s" argument "%s" of type "%s" is required but not provided.`, directive.Definition.Name, argDef.Name, argDef.Type.String()),
+ At(directive.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go b/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
new file mode 100644
index 00000000..bb961f44
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/scalar_leafs.go
@@ -0,0 +1,36 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("ScalarLeafs", func(observers *Events, addError AddErrFunc) {
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ if field.Definition == nil {
+ return
+ }
+
+ fieldType := walker.Schema.Types[field.Definition.Type.Name()]
+ if fieldType == nil {
+ return
+ }
+
+ if fieldType.IsLeafType() && len(field.SelectionSet) > 0 {
+ addError(
+ Message(`Field "%s" must not have a selection since type "%s" has no subfields.`, field.Name, fieldType.Name),
+ At(field.Position),
+ )
+ }
+
+ if !fieldType.IsLeafType() && len(field.SelectionSet) == 0 {
+ addError(
+ Message(`Field "%s" of type "%s" must have a selection of subfields.`, field.Name, field.Definition.Type.String()),
+ Suggestf(`"%s { ... }"`, field.Name),
+ At(field.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go b/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
new file mode 100644
index 00000000..53003c11
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/single_field_subscriptions.go
@@ -0,0 +1,30 @@
+package validator
+
+import (
+ "strconv"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("SingleFieldSubscriptions", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ if operation.Operation != ast.Subscription {
+ return
+ }
+
+ if len(operation.SelectionSet) > 1 {
+ name := "Anonymous Subscription"
+ if operation.Name != "" {
+ name = `Subscription ` + strconv.Quote(operation.Name)
+ }
+
+ addError(
+ Message(`%s must select only one top level field.`, name),
+ At(operation.SelectionSet[1].GetPosition()),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
new file mode 100644
index 00000000..0ddcde72
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_argument_names.go
@@ -0,0 +1,33 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnField(func(walker *Walker, field *ast.Field) {
+ checkUniqueArgs(field.Arguments, addError)
+ })
+
+ observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
+ checkUniqueArgs(directive.Arguments, addError)
+ })
+ })
+}
+
+func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) {
+ knownArgNames := map[string]bool{}
+
+ for _, arg := range args {
+ if knownArgNames[arg.Name] {
+ addError(
+ Message(`There can be only one argument named "%s".`, arg.Name),
+ At(arg.Position),
+ )
+ }
+
+ knownArgNames[arg.Name] = true
+ }
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
new file mode 100644
index 00000000..077c4687
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_directives_per_location.go
@@ -0,0 +1,24 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) {
+ observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) {
+ seen := map[string]bool{}
+
+ for _, dir := range directives {
+ if seen[dir.Name] {
+ addError(
+ Message(`The directive "%s" can only be used once at this location.`, dir.Name),
+ At(dir.Position),
+ )
+ }
+ seen[dir.Name] = true
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
new file mode 100644
index 00000000..46a8b7c7
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_fragment_names.go
@@ -0,0 +1,22 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) {
+ seenFragments := map[string]bool{}
+
+ observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+ if seenFragments[fragment.Name] {
+ addError(
+ Message(`There can be only one fragment named "%s".`, fragment.Name),
+ At(fragment.Position),
+ )
+ }
+ seenFragments[fragment.Name] = true
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
new file mode 100644
index 00000000..f254d588
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_input_field_names.go
@@ -0,0 +1,27 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnValue(func(walker *Walker, value *ast.Value) {
+ if value.Kind != ast.ObjectValue {
+ return
+ }
+
+ seen := map[string]bool{}
+ for _, field := range value.Children {
+ if seen[field.Name] {
+ addError(
+ Message(`There can be only one input field named "%s".`, field.Name),
+ At(field.Position),
+ )
+ }
+ seen[field.Name] = true
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
new file mode 100644
index 00000000..c1ab56be
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_operation_names.go
@@ -0,0 +1,22 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) {
+ seen := map[string]bool{}
+
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ if seen[operation.Name] {
+ addError(
+ Message(`There can be only one operation named "%s".`, operation.Name),
+ At(operation.Position),
+ )
+ }
+ seen[operation.Name] = true
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go b/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
new file mode 100644
index 00000000..70590a88
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/unique_variable_names.go
@@ -0,0 +1,23 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ seen := map[string]bool{}
+ for _, def := range operation.VariableDefinitions {
+ if seen[def.Variable] {
+ addError(
+ Message(`There can be only one variable named "%s".`, def.Variable),
+ At(def.Position),
+ )
+ }
+ seen[def.Variable] = true
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go b/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
new file mode 100644
index 00000000..d64cc666
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/values_of_correct_type.go
@@ -0,0 +1,130 @@
+package validator
+
+import (
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) {
+ observers.OnValue(func(walker *Walker, value *ast.Value) {
+ if value.Definition == nil || value.ExpectedType == nil {
+ return
+ }
+
+ if value.Definition.Kind == ast.Scalar {
+ // Skip validating custom scalars
+ if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") {
+ return
+ }
+ }
+
+ var possibleEnums []string
+ if value.Definition.Kind == ast.Enum {
+ for _, val := range value.Definition.EnumValues {
+ possibleEnums = append(possibleEnums, val.Name)
+ }
+ }
+
+ rawVal, err := value.Value(nil)
+ if err != nil {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ switch value.Kind {
+ case ast.NullValue:
+ if value.ExpectedType.NonNull {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.ListValue:
+ if value.ExpectedType.Elem == nil {
+ unexpectedTypeMessage(addError, value)
+ return
+ }
+
+ case ast.IntValue:
+ if !value.Definition.OneOf("Int", "Float", "ID") {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.FloatValue:
+ if !value.Definition.OneOf("Float") {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.StringValue, ast.BlockValue:
+ if value.Definition.Kind == ast.Enum {
+ rawValStr := fmt.Sprint(rawVal)
+ addError(
+ Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
+ SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
+ At(value.Position),
+ )
+ } else if !value.Definition.OneOf("String", "ID") {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.EnumValue:
+ if value.Definition.Kind != ast.Enum || value.Definition.EnumValues.ForName(value.Raw) == nil {
+ rawValStr := fmt.Sprint(rawVal)
+ addError(
+ Message("Expected type %s, found %s.", value.ExpectedType.String(), value.String()),
+ SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums),
+ At(value.Position),
+ )
+ }
+
+ case ast.BooleanValue:
+ if !value.Definition.OneOf("Boolean") {
+ unexpectedTypeMessage(addError, value)
+ }
+
+ case ast.ObjectValue:
+
+ for _, field := range value.Definition.Fields {
+ if field.Type.NonNull {
+ fieldValue := value.Children.ForName(field.Name)
+ if fieldValue == nil && field.DefaultValue == nil {
+ addError(
+ Message("Field %s.%s of required type %s was not provided.", value.Definition.Name, field.Name, field.Type.String()),
+ At(value.Position),
+ )
+ continue
+ }
+ }
+ }
+
+ for _, fieldValue := range value.Children {
+ if value.Definition.Fields.ForName(fieldValue.Name) == nil {
+ var suggestions []string
+ for _, fieldValue := range value.Definition.Fields {
+ suggestions = append(suggestions, fieldValue.Name)
+ }
+
+ addError(
+ Message(`Field "%s" is not defined by type %s.`, fieldValue.Name, value.Definition.Name),
+ SuggestListUnquoted("Did you mean", fieldValue.Name, suggestions),
+ At(fieldValue.Position),
+ )
+ }
+ }
+
+ case ast.Variable:
+ return
+
+ default:
+ panic(fmt.Errorf("unhandled %T", value))
+ }
+ })
+ })
+}
+
+func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) {
+ addError(
+ Message("Expected type %s, found %s.", v.ExpectedType.String(), v.String()),
+ At(v.Position),
+ )
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
new file mode 100644
index 00000000..9d58ae1c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/variables_are_input_types.go
@@ -0,0 +1,28 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) {
+ observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+ for _, def := range operation.VariableDefinitions {
+ if def.Definition == nil {
+ continue
+ }
+ if !def.Definition.IsInputType() {
+ addError(
+ Message(
+ `Variable "$%s" cannot be non-input type "%s".`,
+ def.Variable,
+ def.Type.String(),
+ ),
+ At(def.Position),
+ )
+ }
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go b/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
new file mode 100644
index 00000000..e6d97c9f
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/rules/variables_in_allowed_position.go
@@ -0,0 +1,36 @@
+package validator
+
+import (
+ "github.com/vektah/gqlparser/ast"
+ . "github.com/vektah/gqlparser/validator"
+)
+
+func init() {
+ AddRule("VariablesInAllowedPosition", func(observers *Events, addError AddErrFunc) {
+ observers.OnValue(func(walker *Walker, value *ast.Value) {
+ if value.Kind != ast.Variable || value.ExpectedType == nil || value.VariableDefinition == nil || walker.CurrentOperation == nil {
+ return
+ }
+
+ // todo: move me into walk
+ // If there is a default value, non-nullable types can be null
+ if value.VariableDefinition.DefaultValue != nil && value.VariableDefinition.DefaultValue.Kind != ast.NullValue {
+ if value.ExpectedType.NonNull {
+ value.ExpectedType.NonNull = false
+ }
+ }
+
+ if !value.VariableDefinition.Type.IsCompatible(value.ExpectedType) {
+ addError(
+ Message(
+ `Variable "%s" of type "%s" used in position expecting type "%s".`,
+ value,
+ value.VariableDefinition.Type.String(),
+ value.ExpectedType.String(),
+ ),
+ At(value.Position),
+ )
+ }
+ })
+ })
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/schema.go b/vendor/github.com/vektah/gqlparser/validator/schema.go
new file mode 100644
index 00000000..8fa18d7e
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/schema.go
@@ -0,0 +1,212 @@
+//go:generate go run ./inliner/inliner.go
+
+package validator
+
+import (
+ "strconv"
+
+ . "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+ "github.com/vektah/gqlparser/parser"
+)
+
+func LoadSchema(inputs ...*Source) (*Schema, *gqlerror.Error) {
+ ast := &SchemaDocument{}
+ for _, input := range inputs {
+ inputAst, err := parser.ParseSchema(input)
+ if err != nil {
+ return nil, err
+ }
+
+ ast.Merge(inputAst)
+ }
+
+ schema := Schema{
+ Types: map[string]*Definition{},
+ Directives: map[string]*DirectiveDefinition{},
+ PossibleTypes: map[string][]*Definition{},
+ }
+
+ for i, def := range ast.Definitions {
+ if schema.Types[def.Name] != nil {
+ return nil, gqlerror.ErrorPosf(def.Position, "Cannot redeclare type %s.", def.Name)
+ }
+ schema.Types[def.Name] = ast.Definitions[i]
+
+ if def.Kind != Interface {
+ for _, intf := range def.Interfaces {
+ schema.AddPossibleType(intf, ast.Definitions[i])
+ }
+ schema.AddPossibleType(def.Name, ast.Definitions[i])
+ }
+ }
+
+ for _, ext := range ast.Extensions {
+ def := schema.Types[ext.Name]
+ if def == nil {
+ return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because it does not exist.", ext.Name)
+ }
+
+ if def.Kind != ext.Kind {
+ return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because the base type is a %s, not %s.", ext.Name, def.Kind, ext.Kind)
+ }
+
+ def.Directives = append(def.Directives, ext.Directives...)
+ def.Interfaces = append(def.Interfaces, ext.Interfaces...)
+ def.Fields = append(def.Fields, ext.Fields...)
+ def.Types = append(def.Types, ext.Types...)
+ def.EnumValues = append(def.EnumValues, ext.EnumValues...)
+ }
+
+ for i, dir := range ast.Directives {
+ if schema.Directives[dir.Name] != nil {
+ return nil, gqlerror.ErrorPosf(dir.Position, "Cannot redeclare directive %s.", dir.Name)
+ }
+ schema.Directives[dir.Name] = ast.Directives[i]
+ }
+
+ if len(ast.Schema) > 1 {
+ return nil, gqlerror.ErrorPosf(ast.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.")
+ }
+
+ if len(ast.Schema) == 1 {
+ for _, entrypoint := range ast.Schema[0].OperationTypes {
+ def := schema.Types[entrypoint.Type]
+ if def == nil {
+ return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
+ }
+ switch entrypoint.Operation {
+ case Query:
+ schema.Query = def
+ case Mutation:
+ schema.Mutation = def
+ case Subscription:
+ schema.Subscription = def
+ }
+ }
+ }
+
+ for _, ext := range ast.SchemaExtension {
+ for _, entrypoint := range ext.OperationTypes {
+ def := schema.Types[entrypoint.Type]
+ if def == nil {
+ return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
+ }
+ switch entrypoint.Operation {
+ case Query:
+ schema.Query = def
+ case Mutation:
+ schema.Mutation = def
+ case Subscription:
+ schema.Subscription = def
+ }
+ }
+ }
+
+ for _, typ := range schema.Types {
+ err := validateDefinition(&schema, typ)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ for _, dir := range schema.Directives {
+ err := validateDirective(&schema, dir)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ if schema.Query == nil && schema.Types["Query"] != nil {
+ schema.Query = schema.Types["Query"]
+ }
+
+ if schema.Mutation == nil && schema.Types["Mutation"] != nil {
+ schema.Mutation = schema.Types["Mutation"]
+ }
+
+ if schema.Subscription == nil && schema.Types["Subscription"] != nil {
+ schema.Subscription = schema.Types["Subscription"]
+ }
+
+ if schema.Query != nil {
+ schema.Query.Fields = append(
+ schema.Query.Fields,
+ &FieldDefinition{
+ Name: "__schema",
+ Type: NonNullNamedType("__Schema", nil),
+ },
+ &FieldDefinition{
+ Name: "__type",
+ Type: NonNullNamedType("__Type", nil),
+ Arguments: ArgumentDefinitionList{
+ {Name: "name", Type: NamedType("String", nil)},
+ },
+ },
+ )
+ }
+
+ return &schema, nil
+}
+
+func validateDirective(schema *Schema, def *DirectiveDefinition) *gqlerror.Error {
+ return validateArgs(schema, def.Arguments, def)
+}
+
+func validateDefinition(schema *Schema, def *Definition) *gqlerror.Error {
+ for _, field := range def.Fields {
+ if err := validateTypeRef(schema, field.Type); err != nil {
+ return err
+ }
+ if err := validateArgs(schema, field.Arguments, nil); err != nil {
+ return err
+ }
+ if err := validateDirectives(schema, field.Directives, nil); err != nil {
+ return err
+ }
+ }
+
+ for _, intf := range def.Interfaces {
+ intDef := schema.Types[intf]
+ if intDef == nil {
+ return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intf))
+ }
+ if intDef.Kind != Interface {
+ return gqlerror.ErrorPosf(def.Position, "%s is a non interface type %s.", strconv.Quote(intf), intDef.Kind)
+ }
+ }
+
+ return validateDirectives(schema, def.Directives, nil)
+}
+
+func validateTypeRef(schema *Schema, typ *Type) *gqlerror.Error {
+ if schema.Types[typ.Name()] == nil {
+ return gqlerror.ErrorPosf(typ.Position, "Undefined type %s.", typ.Name())
+ }
+ return nil
+}
+
+func validateArgs(schema *Schema, args ArgumentDefinitionList, currentDirective *DirectiveDefinition) *gqlerror.Error {
+ for _, arg := range args {
+ if err := validateTypeRef(schema, arg.Type); err != nil {
+ return err
+ }
+ if err := validateDirectives(schema, arg.Directives, currentDirective); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func validateDirectives(schema *Schema, dirs DirectiveList, currentDirective *DirectiveDefinition) *gqlerror.Error {
+ for _, dir := range dirs {
+ if currentDirective != nil && dir.Name == currentDirective.Name {
+ return gqlerror.ErrorPosf(dir.Position, "Directive %s cannot refer to itself.", currentDirective.Name)
+ }
+ if schema.Directives[dir.Name] == nil {
+ return gqlerror.ErrorPosf(dir.Position, "Undefined directive %s.", dir.Name)
+ }
+ dir.Definition = schema.Directives[dir.Name]
+ }
+ return nil
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/schema_test.yml b/vendor/github.com/vektah/gqlparser/validator/schema_test.yml
new file mode 100644
index 00000000..59e7145c
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/schema_test.yml
@@ -0,0 +1,152 @@
+types:
+ - name: cannot be redeclared
+ input: |
+ type A {
+ name: String
+ }
+ type A {
+ name: String
+ }
+ error:
+ message: "Cannot redeclare type A."
+ locations: [{line: 4, column: 6}]
+
+interfaces:
+ - name: must exist
+ input: |
+ type Thing implements Object {
+ id: ID!
+ }
+
+ type Query {
+ Things: [Thing!]!
+ }
+ error:
+ message: 'Undefined type "Object".'
+ locations: [{line: 1, column: 6}]
+
+ - name: must be an interface
+ input: |
+ type Thing implements Object {
+ id: ID!
+ }
+
+ type Query {
+ Things: [Thing!]!
+ }
+
+ type Object {
+ name: String
+ }
+ error:
+ message: '"Object" is a non interface type OBJECT.'
+ locations: [{line: 1, column: 6}]
+
+type extensions:
+ - name: cannot extend non-existent types
+ input: |
+ extend type A {
+ name: String
+ }
+ error:
+ message: "Cannot extend type A because it does not exist."
+ locations: [{line: 1, column: 13}]
+
+ - name: cannot extend existing types with an incorrect kind
+ input: |
+ scalar A
+ extend type A {
+ name: String
+ }
+ error:
+ message: "Cannot extend type A because the base type is a SCALAR, not OBJECT."
+ locations: [{line: 2, column: 13}]
+
+directives:
+ - name: cannot redeclare directives
+ input: |
+ directive @A on FIELD_DEFINITION
+ directive @A on FIELD_DEFINITION
+ error:
+ message: "Cannot redeclare directive A."
+ locations: [{line: 2, column: 12}]
+
+ - name: must be declared
+ input: |
+ type User {
+ name: String @foo
+ }
+ error:
+ message: "Undefined directive foo."
+ locations: [{line: 2, column: 17}]
+
+ - name: cannot be self-referential
+ input: |
+ directive @A(foo: Int! @A) on FIELD_DEFINITION
+ error:
+ message: "Directive A cannot refer to itself."
+ locations: [{line: 1, column: 25}]
+
+entry points:
+ - name: multiple schema entry points
+ input: |
+ schema {
+ query: Query
+ }
+ schema {
+ query: Query
+ }
+ scalar Query
+ error:
+ message: "Cannot have multiple schema entry points, consider schema extensions instead."
+ locations: [{line: 4, column: 8}]
+
+ - name: Undefined schema entrypoint
+ input: |
+ schema {
+ query: Query
+ }
+ error:
+ message: "Schema root query refers to a type Query that does not exist."
+ locations: [{line: 2, column: 3}]
+
+entry point extensions:
+ - name: Undefined schema entrypoint
+ input: |
+ schema {
+ query: Query
+ }
+ scalar Query
+ extend schema {
+ mutation: Mutation
+ }
+ error:
+ message: "Schema root mutation refers to a type Mutation that does not exist."
+ locations: [{line: 6, column: 3}]
+
+type references:
+ - name: Field types
+ input: |
+ type User {
+ posts: Post
+ }
+ error:
+ message: "Undefined type Post."
+ locations: [{line: 2, column: 10}]
+
+ - name: Arg types
+ input: |
+ type User {
+ posts(foo: FooBar): String
+ }
+ error:
+ message: "Undefined type FooBar."
+ locations: [{line: 2, column: 14}]
+
+ - name: Directive arg types
+ input: |
+ directive @Foo(foo: FooBar) on FIELD_DEFINITION
+
+ error:
+ message: "Undefined type FooBar."
+ locations: [{line: 1, column: 21}]
diff --git a/vendor/github.com/vektah/gqlparser/validator/suggestionList.go b/vendor/github.com/vektah/gqlparser/validator/suggestionList.go
new file mode 100644
index 00000000..f58d0fc2
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/suggestionList.go
@@ -0,0 +1,69 @@
+package validator
+
+import (
+ "sort"
+ "strings"
+
+ "github.com/agnivade/levenshtein"
+)
+
+// Given an invalid input string and a list of valid options, returns a filtered
+// list of valid options sorted based on their similarity with the input.
+func SuggestionList(input string, options []string) []string {
+ var results []string
+ optionsByDistance := map[string]int{}
+
+ for _, option := range options {
+ distance := lexicalDistance(input, option)
+ threshold := calcThreshold(input, option)
+ if distance <= threshold {
+ results = append(results, option)
+ optionsByDistance[option] = distance
+ }
+ }
+
+ sort.Slice(results, func(i, j int) bool {
+ return optionsByDistance[results[i]] < optionsByDistance[results[j]]
+ })
+ return results
+}
+
+func calcThreshold(a, b string) (threshold int) {
+ if len(a) >= len(b) {
+ threshold = len(a) / 2
+ } else {
+ threshold = len(b) / 2
+ }
+ if threshold < 1 {
+ threshold = 1
+ }
+ return
+}
+
+// Computes the lexical distance between strings A and B.
+//
+// The "distance" between two strings is given by counting the minimum number
+// of edits needed to transform string A into string B. An edit can be an
+// insertion, deletion, or substitution of a single character, or a swap of two
+// adjacent characters.
+//
+// Includes a custom alteration from Damerau-Levenshtein to treat case changes
+// as a single edit which helps identify mis-cased values with an edit distance
+// of 1.
+//
+ // This distance can be useful for detecting typos in input or for sorting suggestions.
+func lexicalDistance(a, b string) int {
+ if a == b {
+ return 0
+ }
+
+ a = strings.ToLower(a)
+ b = strings.ToLower(b)
+
+ // Any case change counts as a single edit
+ if a == b {
+ return 1
+ }
+
+ return levenshtein.ComputeDistance(a, b)
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/validator.go b/vendor/github.com/vektah/gqlparser/validator/validator.go
new file mode 100644
index 00000000..bbacec6f
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/validator.go
@@ -0,0 +1,44 @@
+package validator
+
+import (
+ . "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+type AddErrFunc func(options ...ErrorOption)
+
+type ruleFunc func(observers *Events, addError AddErrFunc)
+
+type rule struct {
+ name string
+ rule ruleFunc
+}
+
+var rules []rule
+
+ // AddRule adds a rule to the rule set.
+// f is called once each time `Validate` is executed.
+func AddRule(name string, f ruleFunc) {
+ rules = append(rules, rule{name: name, rule: f})
+}
+
+func Validate(schema *Schema, doc *QueryDocument) gqlerror.List {
+ var errs gqlerror.List
+
+ observers := &Events{}
+ for i := range rules {
+ rule := rules[i]
+ rule.rule(observers, func(options ...ErrorOption) {
+ err := &gqlerror.Error{
+ Rule: rule.name,
+ }
+ for _, o := range options {
+ o(err)
+ }
+ errs = append(errs, err)
+ })
+ }
+
+ Walk(schema, doc, observers)
+ return errs
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/vars.go b/vendor/github.com/vektah/gqlparser/validator/vars.go
new file mode 100644
index 00000000..0743f5cc
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/vars.go
@@ -0,0 +1,195 @@
+package validator
+
+import (
+ "reflect"
+
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+ "github.com/vektah/gqlparser/gqlerror"
+)
+
+var UnexpectedType = fmt.Errorf("Unexpected Type")
+
+// VariableValues coerces and validates variable values
+func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, *gqlerror.Error) {
+ coercedVars := map[string]interface{}{}
+
+ validator := varValidator{
+ path: []interface{}{"variable"},
+ schema: schema,
+ }
+
+ for _, v := range op.VariableDefinitions {
+ validator.path = append(validator.path, v.Variable)
+
+ if !v.Definition.IsInputType() {
+ return nil, gqlerror.ErrorPathf(validator.path, "must an input type")
+ }
+
+ val, hasValue := variables[v.Variable]
+ if !hasValue {
+ if v.DefaultValue != nil {
+ var err error
+ val, err = v.DefaultValue.Value(nil)
+ if err != nil {
+ return nil, gqlerror.WrapPath(validator.path, err)
+ }
+ hasValue = true
+ } else if v.Type.NonNull {
+ return nil, gqlerror.ErrorPathf(validator.path, "must be defined")
+ }
+ }
+
+ if hasValue {
+ if val == nil {
+ if v.Type.NonNull {
+ return nil, gqlerror.ErrorPathf(validator.path, "cannot be null")
+ }
+ coercedVars[v.Variable] = nil
+ } else {
+ rv := reflect.ValueOf(val)
+ if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface {
+ rv = rv.Elem()
+ }
+
+ if err := validator.validateVarType(v.Type, rv); err != nil {
+ return nil, err
+ }
+
+ coercedVars[v.Variable] = val
+ }
+ }
+
+ validator.path = validator.path[0 : len(validator.path)-1]
+ }
+
+ return coercedVars, nil
+}
+
+type varValidator struct {
+ path []interface{}
+ schema *ast.Schema
+}
+
+func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) *gqlerror.Error {
+ if typ.Elem != nil {
+ if val.Kind() != reflect.Slice {
+ return gqlerror.ErrorPathf(v.path, "must be an array")
+ }
+
+ for i := 0; i < val.Len(); i++ {
+ v.path = append(v.path, i)
+ field := val.Index(i)
+
+ if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
+ if typ.Elem.NonNull && field.IsNil() {
+ return gqlerror.ErrorPathf(v.path, "cannot be null")
+ }
+ field = field.Elem()
+ }
+
+ if err := v.validateVarType(typ.Elem, field); err != nil {
+ return err
+ }
+
+ v.path = v.path[0 : len(v.path)-1]
+ }
+
+ return nil
+ }
+
+ def := v.schema.Types[typ.NamedType]
+ if def == nil {
+ panic(fmt.Errorf("missing def for %s", typ.NamedType))
+ }
+
+ switch def.Kind {
+ case ast.Enum:
+ kind := val.Type().Kind()
+ if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
+ return nil
+ }
+ return gqlerror.ErrorPathf(v.path, "enums must be ints or strings")
+ case ast.Scalar:
+ kind := val.Type().Kind()
+ switch typ.NamedType {
+ case "Int":
+ if kind == reflect.String || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 {
+ return nil
+ }
+ case "Float":
+ if kind == reflect.String || kind == reflect.Float32 || kind == reflect.Float64 || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 {
+ return nil
+ }
+ case "String":
+ if kind == reflect.String {
+ return nil
+ }
+
+ case "Boolean":
+ if kind == reflect.Bool {
+ return nil
+ }
+
+ case "ID":
+ if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
+ return nil
+ }
+ default:
+ // assume custom scalars are ok
+ return nil
+ }
+ return gqlerror.ErrorPathf(v.path, "cannot use %s as %s", kind.String(), typ.NamedType)
+ case ast.InputObject:
+ if val.Kind() != reflect.Map {
+ return gqlerror.ErrorPathf(v.path, "must be a %s", def.Name)
+ }
+
+ // check for unknown fields
+ for _, name := range val.MapKeys() {
+ val.MapIndex(name)
+ fieldDef := def.Fields.ForName(name.String())
+ v.path = append(v.path, name)
+
+ if fieldDef == nil {
+ return gqlerror.ErrorPathf(v.path, "unknown field")
+ }
+ v.path = v.path[0 : len(v.path)-1]
+ }
+
+ for _, fieldDef := range def.Fields {
+ v.path = append(v.path, fieldDef.Name)
+
+ field := val.MapIndex(reflect.ValueOf(fieldDef.Name))
+ if !field.IsValid() {
+ if fieldDef.Type.NonNull {
+ return gqlerror.ErrorPathf(v.path, "must be defined")
+ }
+ continue
+ }
+
+ if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
+ if fieldDef.Type.NonNull && field.IsNil() {
+ return gqlerror.ErrorPathf(v.path, "cannot be null")
+ }
+ // allow null object fields and skip them
+ if !fieldDef.Type.NonNull && field.IsNil() {
+ continue
+ }
+ field = field.Elem()
+ }
+
+ err := v.validateVarType(fieldDef.Type, field)
+ if err != nil {
+ return err
+ }
+
+ v.path = v.path[0 : len(v.path)-1]
+ }
+ default:
+ panic(fmt.Errorf("unsupported type %s", def.Kind))
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/vektah/gqlparser/validator/walk.go b/vendor/github.com/vektah/gqlparser/validator/walk.go
new file mode 100644
index 00000000..751ba1f1
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/validator/walk.go
@@ -0,0 +1,286 @@
+package validator
+
+import (
+ "context"
+ "fmt"
+
+ "github.com/vektah/gqlparser/ast"
+)
+
+type Events struct {
+ operationVisitor []func(walker *Walker, operation *ast.OperationDefinition)
+ field []func(walker *Walker, field *ast.Field)
+ fragment []func(walker *Walker, fragment *ast.FragmentDefinition)
+ inlineFragment []func(walker *Walker, inlineFragment *ast.InlineFragment)
+ fragmentSpread []func(walker *Walker, fragmentSpread *ast.FragmentSpread)
+ directive []func(walker *Walker, directive *ast.Directive)
+ directiveList []func(walker *Walker, directives []*ast.Directive)
+ value []func(walker *Walker, value *ast.Value)
+}
+
+func (o *Events) OnOperation(f func(walker *Walker, operation *ast.OperationDefinition)) {
+ o.operationVisitor = append(o.operationVisitor, f)
+}
+func (o *Events) OnField(f func(walker *Walker, field *ast.Field)) {
+ o.field = append(o.field, f)
+}
+func (o *Events) OnFragment(f func(walker *Walker, fragment *ast.FragmentDefinition)) {
+ o.fragment = append(o.fragment, f)
+}
+func (o *Events) OnInlineFragment(f func(walker *Walker, inlineFragment *ast.InlineFragment)) {
+ o.inlineFragment = append(o.inlineFragment, f)
+}
+func (o *Events) OnFragmentSpread(f func(walker *Walker, fragmentSpread *ast.FragmentSpread)) {
+ o.fragmentSpread = append(o.fragmentSpread, f)
+}
+func (o *Events) OnDirective(f func(walker *Walker, directive *ast.Directive)) {
+ o.directive = append(o.directive, f)
+}
+func (o *Events) OnDirectiveList(f func(walker *Walker, directives []*ast.Directive)) {
+ o.directiveList = append(o.directiveList, f)
+}
+func (o *Events) OnValue(f func(walker *Walker, value *ast.Value)) {
+ o.value = append(o.value, f)
+}
+
+func Walk(schema *ast.Schema, document *ast.QueryDocument, observers *Events) {
+ w := Walker{
+ Observers: observers,
+ Schema: schema,
+ Document: document,
+ }
+
+ w.walk()
+}
+
+type Walker struct {
+ Context context.Context
+ Observers *Events
+ Schema *ast.Schema
+ Document *ast.QueryDocument
+
+ validatedFragmentSpreads map[string]bool
+ CurrentOperation *ast.OperationDefinition
+}
+
+func (w *Walker) walk() {
+ for _, child := range w.Document.Operations {
+ w.validatedFragmentSpreads = make(map[string]bool)
+ w.walkOperation(child)
+ }
+ for _, child := range w.Document.Fragments {
+ w.validatedFragmentSpreads = make(map[string]bool)
+ w.walkFragment(child)
+ }
+}
+
+func (w *Walker) walkOperation(operation *ast.OperationDefinition) {
+ w.CurrentOperation = operation
+ for _, varDef := range operation.VariableDefinitions {
+ varDef.Definition = w.Schema.Types[varDef.Type.Name()]
+
+ if varDef.DefaultValue != nil {
+ varDef.DefaultValue.ExpectedType = varDef.Type
+ varDef.DefaultValue.Definition = w.Schema.Types[varDef.Type.Name()]
+ }
+ }
+
+ var def *ast.Definition
+ var loc ast.DirectiveLocation
+ switch operation.Operation {
+ case ast.Query, "":
+ def = w.Schema.Query
+ loc = ast.LocationQuery
+ case ast.Mutation:
+ def = w.Schema.Mutation
+ loc = ast.LocationMutation
+ case ast.Subscription:
+ def = w.Schema.Subscription
+ loc = ast.LocationSubscription
+ }
+
+ w.walkDirectives(def, operation.Directives, loc)
+
+ for _, varDef := range operation.VariableDefinitions {
+ if varDef.DefaultValue != nil {
+ w.walkValue(varDef.DefaultValue)
+ }
+ }
+
+ w.walkSelectionSet(def, operation.SelectionSet)
+
+ for _, v := range w.Observers.operationVisitor {
+ v(w, operation)
+ }
+ w.CurrentOperation = nil
+}
+
+func (w *Walker) walkFragment(it *ast.FragmentDefinition) {
+ def := w.Schema.Types[it.TypeCondition]
+
+ it.Definition = def
+
+ w.walkDirectives(def, it.Directives, ast.LocationFragmentDefinition)
+ w.walkSelectionSet(def, it.SelectionSet)
+
+ for _, v := range w.Observers.fragment {
+ v(w, it)
+ }
+}
+
+func (w *Walker) walkDirectives(parentDef *ast.Definition, directives []*ast.Directive, location ast.DirectiveLocation) {
+ for _, dir := range directives {
+ def := w.Schema.Directives[dir.Name]
+ dir.Definition = def
+ dir.ParentDefinition = parentDef
+ dir.Location = location
+
+ for _, arg := range dir.Arguments {
+ var argDef *ast.ArgumentDefinition
+ if def != nil {
+ argDef = def.Arguments.ForName(arg.Name)
+ }
+
+ w.walkArgument(argDef, arg)
+ }
+
+ for _, v := range w.Observers.directive {
+ v(w, dir)
+ }
+ }
+
+ for _, v := range w.Observers.directiveList {
+ v(w, directives)
+ }
+}
+
+func (w *Walker) walkValue(value *ast.Value) {
+ if value.Kind == ast.Variable && w.CurrentOperation != nil {
+ value.VariableDefinition = w.CurrentOperation.VariableDefinitions.ForName(value.Raw)
+ if value.VariableDefinition != nil {
+ value.VariableDefinition.Used = true
+ }
+ }
+
+ if value.Kind == ast.ObjectValue {
+ for _, child := range value.Children {
+ if value.Definition != nil {
+ fieldDef := value.Definition.Fields.ForName(child.Name)
+ if fieldDef != nil {
+ child.Value.ExpectedType = fieldDef.Type
+ child.Value.Definition = w.Schema.Types[fieldDef.Type.Name()]
+ }
+ }
+ w.walkValue(child.Value)
+ }
+ }
+
+ if value.Kind == ast.ListValue {
+ for _, child := range value.Children {
+ if value.ExpectedType != nil && value.ExpectedType.Elem != nil {
+ child.Value.ExpectedType = value.ExpectedType.Elem
+ child.Value.Definition = value.Definition
+ }
+
+ w.walkValue(child.Value)
+ }
+ }
+
+ for _, v := range w.Observers.value {
+ v(w, value)
+ }
+}
+
+func (w *Walker) walkArgument(argDef *ast.ArgumentDefinition, arg *ast.Argument) {
+ if argDef != nil {
+ arg.Value.ExpectedType = argDef.Type
+ arg.Value.Definition = w.Schema.Types[argDef.Type.Name()]
+ }
+
+ w.walkValue(arg.Value)
+}
+
+func (w *Walker) walkSelectionSet(parentDef *ast.Definition, it ast.SelectionSet) {
+ for _, child := range it {
+ w.walkSelection(parentDef, child)
+ }
+}
+
+func (w *Walker) walkSelection(parentDef *ast.Definition, it ast.Selection) {
+ switch it := it.(type) {
+ case *ast.Field:
+ var def *ast.FieldDefinition
+ if it.Name == "__typename" {
+ def = &ast.FieldDefinition{
+ Name: "__typename",
+ Type: ast.NamedType("String", nil),
+ }
+ } else if parentDef != nil {
+ def = parentDef.Fields.ForName(it.Name)
+ }
+
+ it.Definition = def
+ it.ObjectDefinition = parentDef
+
+ var nextParentDef *ast.Definition
+ if def != nil {
+ nextParentDef = w.Schema.Types[def.Type.Name()]
+ }
+
+ for _, arg := range it.Arguments {
+ var argDef *ast.ArgumentDefinition
+ if def != nil {
+ argDef = def.Arguments.ForName(arg.Name)
+ }
+
+ w.walkArgument(argDef, arg)
+ }
+
+ w.walkDirectives(nextParentDef, it.Directives, ast.LocationField)
+ w.walkSelectionSet(nextParentDef, it.SelectionSet)
+
+ for _, v := range w.Observers.field {
+ v(w, it)
+ }
+
+ case *ast.InlineFragment:
+ it.ObjectDefinition = parentDef
+
+ nextParentDef := parentDef
+ if it.TypeCondition != "" {
+ nextParentDef = w.Schema.Types[it.TypeCondition]
+ }
+
+ w.walkDirectives(nextParentDef, it.Directives, ast.LocationInlineFragment)
+ w.walkSelectionSet(nextParentDef, it.SelectionSet)
+
+ for _, v := range w.Observers.inlineFragment {
+ v(w, it)
+ }
+
+ case *ast.FragmentSpread:
+ def := w.Document.Fragments.ForName(it.Name)
+ it.Definition = def
+ it.ObjectDefinition = parentDef
+
+ var nextParentDef *ast.Definition
+ if def != nil {
+ nextParentDef = w.Schema.Types[def.TypeCondition]
+ }
+
+ w.walkDirectives(nextParentDef, it.Directives, ast.LocationFragmentSpread)
+
+ if def != nil && !w.validatedFragmentSpreads[def.Name] {
+ // prevent infinite recursion
+ w.validatedFragmentSpreads[def.Name] = true
+ w.walkSelectionSet(nextParentDef, def.SelectionSet)
+ }
+
+ for _, v := range w.Observers.fragmentSpread {
+ v(w, it)
+ }
+
+ default:
+ panic(fmt.Errorf("unsupported %T", it))
+ }
+}