-rw-r--r--  .travis.yml | 2
-rw-r--r--  Gopkg.lock | 28
-rw-r--r--  Gopkg.toml | 2
-rw-r--r--  git-bug.go | 4
-rw-r--r--  graphql/connections/connection_template.go | 4
-rw-r--r--  graphql/connections/gen_comment.go | 4
-rw-r--r--  graphql/connections/gen_identity.go | 4
-rw-r--r--  graphql/connections/gen_lazy_bug.go | 4
-rw-r--r--  graphql/connections/gen_lazy_identity.go | 4
-rw-r--r--  graphql/connections/gen_operation.go | 4
-rw-r--r--  graphql/connections/gen_timeline.go | 4
-rw-r--r--  graphql/graph/gen_graph.go | 10753
-rw-r--r--  graphql/models/gen_models.go | 34
-rw-r--r--  graphql/resolvers/bug.go | 35
-rw-r--r--  graphql/resolvers/mutation.go | 82
-rw-r--r--  graphql/resolvers/operations.go | 30
-rw-r--r--  graphql/resolvers/repo.go | 30
-rw-r--r--  graphql/resolvers/timeline.go | 40
-rw-r--r--  vendor/github.com/99designs/gqlgen/api/generate.go | 76
-rw-r--r--  vendor/github.com/99designs/gqlgen/api/option.go | 20
-rw-r--r--  vendor/github.com/99designs/gqlgen/cmd/ambient.go (renamed from vendor/github.com/99designs/gqlgen/codegen/ambient.go) | 2
-rw-r--r--  vendor/github.com/99designs/gqlgen/cmd/gen.go | 34
-rw-r--r--  vendor/github.com/99designs/gqlgen/cmd/init.go | 66
-rw-r--r--  vendor/github.com/99designs/gqlgen/cmd/root.go | 11
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/args.go | 104
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/args.gotpl | 43
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/build.go | 194
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/codegen.go | 179
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/complexity.go | 11
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/config.go | 273
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/config/binder.go | 451
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/config/config.go | 408
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/data.go | 168
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/directive.go | 119
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/directive_build.go | 48
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/enum.go | 12
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/enum_build.go | 39
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/field.go | 394
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/field.gotpl (renamed from vendor/github.com/99designs/gqlgen/codegen/templates/field.gotpl) | 62
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/generate.go | 15
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/generated!.gotpl (renamed from vendor/github.com/99designs/gqlgen/codegen/templates/generated.gotpl) | 104
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/input.gotpl | 56
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/input_build.go | 96
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/interface.go | 58
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/interface.gotpl | 20
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/interface_build.go | 53
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/model.go | 17
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/models_build.go | 91
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/object.go | 483
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/object.gotpl | 77
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/object_build.go | 181
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/args.gotpl | 13
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/data.go | 13
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/import.go | 46
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/input.gotpl | 28
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/interface.gotpl | 18
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/models.gotpl | 91
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/object.gotpl | 69
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/resolver.gotpl | 44
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/templates/templates.go | 452
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/type.go | 174
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/type.gotpl | 131
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/type_build.go | 100
-rw-r--r--  vendor/github.com/99designs/gqlgen/codegen/util.go | 358
l---------  vendor/github.com/99designs/gqlgen/docs/content/_introduction.md | 1
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/bool.go | 2
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/context.go | 69
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/error.go | 4
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/exec.go | 10
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/fieldset.go | 63
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/id.go | 21
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/int.go | 50
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/introspection/type.go | 20
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/jsonw.go | 31
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/root.go | 7
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/string.go | 59
-rw-r--r--  vendor/github.com/99designs/gqlgen/graphql/version.go | 2
-rw-r--r--  vendor/github.com/99designs/gqlgen/handler/graphql.go | 38
-rw-r--r--  vendor/github.com/99designs/gqlgen/handler/playground.go | 19
-rw-r--r--  vendor/github.com/99designs/gqlgen/handler/websocket.go | 30
-rw-r--r--  vendor/github.com/99designs/gqlgen/internal/code/compare.go | 163
-rw-r--r--  vendor/github.com/99designs/gqlgen/internal/code/imports.go | 60
-rw-r--r--  vendor/github.com/99designs/gqlgen/internal/code/util.go | 56
-rw-r--r--  vendor/github.com/99designs/gqlgen/internal/gopath/gopath.go | 37
-rw-r--r--  vendor/github.com/99designs/gqlgen/internal/imports/prune.go | 22
-rw-r--r--  vendor/github.com/99designs/gqlgen/plugin/modelgen/models.go | 207
-rw-r--r--  vendor/github.com/99designs/gqlgen/plugin/modelgen/models.gotpl | 85
-rw-r--r--  vendor/github.com/99designs/gqlgen/plugin/plugin.go | 20
-rw-r--r--  vendor/github.com/99designs/gqlgen/plugin/resolvergen/resolver.go | 53
-rw-r--r--  vendor/github.com/99designs/gqlgen/plugin/resolvergen/resolver.gotpl | 40
-rw-r--r--  vendor/github.com/99designs/gqlgen/plugin/servergen/server.go | 49
-rw-r--r--  vendor/github.com/99designs/gqlgen/plugin/servergen/server.gotpl (renamed from vendor/github.com/99designs/gqlgen/codegen/templates/server.gotpl) | 16
-rw-r--r--  vendor/github.com/theckman/goconstraint/go1.8/gte/constraint.go | 8
-rw-r--r--  vendor/github.com/theckman/goconstraint/go1.8/gte/go18.go | 7
-rw-r--r--  vendor/github.com/theckman/goconstraint/go1.9/gte/constraint.go | 8
-rw-r--r--  vendor/github.com/theckman/goconstraint/go1.9/gte/go19.go | 7
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/allpackages.go | 198
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/fakecontext.go | 109
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/overlay.go | 103
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/tags.go | 75
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/util.go | 212
-rw-r--r--  vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go | 109
-rw-r--r--  vendor/golang.org/x/tools/go/gcexportdata/importer.go | 73
-rw-r--r--  vendor/golang.org/x/tools/go/gcexportdata/main.go | 99
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go | 852
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go | 1028
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go | 93
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go | 1051
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go | 598
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/isAlias18.go | 13
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/isAlias19.go | 13
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go | 21
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go | 13
-rw-r--r--  vendor/golang.org/x/tools/go/loader/doc.go | 205
-rw-r--r--  vendor/golang.org/x/tools/go/loader/loader.go | 1078
-rw-r--r--  vendor/golang.org/x/tools/go/loader/util.go | 124
-rw-r--r--  vendor/golang.org/x/tools/go/packages/doc.go | 269
-rw-r--r--  vendor/golang.org/x/tools/go/packages/external.go | 68
-rw-r--r--  vendor/golang.org/x/tools/go/packages/golist.go | 337
-rw-r--r--  vendor/golang.org/x/tools/go/packages/golist_fallback.go | 282
-rw-r--r--  vendor/golang.org/x/tools/go/packages/packages.go | 824
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/imports.go | 31
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/map.go | 313
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go | 72
-rw-r--r--  vendor/golang.org/x/tools/go/types/typeutil/ui.go | 52
125 files changed, 15684 insertions, 10465 deletions
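
The change that threads through the rest of this diff is gqlgen's move, as of v0.8.x, to pointer returns for generated object and connection types: the connection makers and resolver interfaces switch from returning values such as models.CommentConnection and bug.Snapshot to returning *models.CommentConnection and *bug.Snapshot. A minimal, self-contained sketch of the new shape (the types here are simplified stand-ins, not the repository's real models):

package main

import "fmt"

// Simplified stand-ins for the generated connection models touched by the diff.
type PageInfo struct {
	HasNextPage     bool
	HasPreviousPage bool
	StartCursor     string
	EndCursor       string
}

type CommentEdge struct {
	Cursor  string
	Message string // the real edge carries a bug.Comment
}

type CommentConnection struct {
	Edges      []CommentEdge
	PageInfo   PageInfo
	TotalCount int
}

// After the upgrade, a connection maker hands back a pointer, matching the
// *models.CommentConnection signatures introduced in gen_comment.go.
func newCommentConnection(edges []CommentEdge, info PageInfo, totalCount int) (*CommentConnection, error) {
	return &CommentConnection{Edges: edges, PageInfo: info, TotalCount: totalCount}, nil
}

func main() {
	conn, err := newCommentConnection([]CommentEdge{{Cursor: "c0", Message: "hello"}}, PageInfo{}, 1)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(conn.TotalCount) // 1
}
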
diff --git a/.travis.yml b/.travis.yml
index a405fecc..95c86e7f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,6 @@
matrix:
include:
- language: go
- go: "1.8"
- - language: go
go: "1.9"
- language: go
go: "1.10"
diff --git a/Gopkg.lock b/Gopkg.lock
index 40e69d5b..409b69a0 100644
--- a/Gopkg.lock
+++ b/Gopkg.lock
@@ -2,22 +2,28 @@
[[projects]]
- digest = "1:e8f6639eaa399c8595b9a2dee175514a9f3842888dc080e2776360dc604150dc"
+ digest = "1:d28d302ab337b9ea3af38c04429e198ab422c1e026592aebcf7aec66ebbc4645"
name = "github.com/99designs/gqlgen"
packages = [
+ "api",
"cmd",
"codegen",
+ "codegen/config",
"codegen/templates",
"complexity",
"graphql",
"graphql/introspection",
"handler",
- "internal/gopath",
+ "internal/code",
"internal/imports",
+ "plugin",
+ "plugin/modelgen",
+ "plugin/resolvergen",
+ "plugin/servergen",
]
pruneopts = "UT"
- revision = "da1e07f5876c0fb79cbad19006f7135be08590d6"
- version = "v0.7.2"
+ revision = "010a79b66f08732cb70d133dcab297a8ee895572"
+ version = "v0.8.3"
[[projects]]
branch = "master"
@@ -300,9 +306,9 @@
version = "v1.2.2"
[[projects]]
- digest = "1:0ec412b28282c10ac6ce56039120741bb738814e4874d23c15fdff37f6aa7a62"
+ digest = "1:823766f4e1833bd562339317d905475fe46789da3863b2da7a1871f9f12bb4b3"
name = "github.com/theckman/goconstraint"
- packages = ["go1.8/gte"]
+ packages = ["go1.9/gte"]
pruneopts = "UT"
revision = "93babf24513d0e8277635da8169fcc5a46ae3f6a"
version = "v1.11.0"
@@ -382,13 +388,15 @@
[[projects]]
branch = "master"
- digest = "1:d22891f2d4a24a531ae01994abae377ec9d8a45ec8849aa95c27dc36014b8c24"
+ digest = "1:090a56ffcfac7f9095b601438e520f4b3609f3442afbaf151f750e2e377c3508"
name = "golang.org/x/tools"
packages = [
"go/ast/astutil",
- "go/buildutil",
+ "go/gcexportdata",
"go/internal/cgo",
- "go/loader",
+ "go/internal/gcimporter",
+ "go/packages",
+ "go/types/typeutil",
"imports",
"internal/fastwalk",
]
@@ -444,7 +452,7 @@
"github.com/spf13/cobra/doc",
"github.com/stretchr/testify/assert",
"github.com/stretchr/testify/require",
- "github.com/theckman/goconstraint/go1.8/gte",
+ "github.com/theckman/goconstraint/go1.9/gte",
"github.com/vektah/gqlgen/client",
"github.com/vektah/gqlparser",
"github.com/vektah/gqlparser/ast",
diff --git a/Gopkg.toml b/Gopkg.toml
index 0fe18091..15242ed8 100644
--- a/Gopkg.toml
+++ b/Gopkg.toml
@@ -58,7 +58,7 @@
[[constraint]]
name = "github.com/99designs/gqlgen"
- version = "0.7.1"
+ version = "0.8.3"
[[constraint]]
name = "github.com/MichaelMure/gocui"
diff --git a/git-bug.go b/git-bug.go
index c3a942e1..20bf74bc 100644
--- a/git-bug.go
+++ b/git-bug.go
@@ -8,8 +8,8 @@ package main
import (
"github.com/MichaelMure/git-bug/commands"
- // minimal go version is 1.8
- _ "github.com/theckman/goconstraint/go1.8/gte"
+ // minimal go version is 1.9
+ _ "github.com/theckman/goconstraint/go1.9/gte"
)
func main() {
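
For context on the hunk above: goconstraint enforces a minimum Go version at compile time through a blank import, so bumping the import from go1.8/gte to go1.9/gte makes builds fail on toolchains older than Go 1.9. A small sketch of that usage, mirroring the import in git-bug.go (assumed standalone example, not the project's code):

package main

import (
	"fmt"

	// Compilation fails on Go toolchains older than 1.9, because this
	// package only builds when the go1.9 build tag is present.
	_ "github.com/theckman/goconstraint/go1.9/gte"
)

func main() {
	fmt.Println("built with Go >= 1.9")
}
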
diff --git a/graphql/connections/connection_template.go b/graphql/connections/connection_template.go
index 412eb318..4370957c 100644
--- a/graphql/connections/connection_template.go
+++ b/graphql/connections/connection_template.go
@@ -29,10 +29,10 @@ type NameConMaker func(
edges []EdgeType,
nodes []NodeType,
info models.PageInfo,
- totalCount int) (ConnectionType, error)
+ totalCount int) (*ConnectionType, error)
// NameCon will paginate a source according to the input of a relay connection
-func NameCon(source []NodeType, edgeMaker NameEdgeMaker, conMaker NameConMaker, input models.ConnectionInput) (ConnectionType, error) {
+func NameCon(source []NodeType, edgeMaker NameEdgeMaker, conMaker NameConMaker, input models.ConnectionInput) (*ConnectionType, error) {
var nodes []NodeType
var edges []EdgeType
var cursors []string
diff --git a/graphql/connections/gen_comment.go b/graphql/connections/gen_comment.go
index 6df21c58..021320c6 100644
--- a/graphql/connections/gen_comment.go
+++ b/graphql/connections/gen_comment.go
@@ -20,10 +20,10 @@ type CommentConMaker func(
edges []models.CommentEdge,
nodes []bug.Comment,
info models.PageInfo,
- totalCount int) (models.CommentConnection, error)
+ totalCount int) (*models.CommentConnection, error)
// CommentCon will paginate a source according to the input of a relay connection
-func CommentCon(source []bug.Comment, edgeMaker CommentEdgeMaker, conMaker CommentConMaker, input models.ConnectionInput) (models.CommentConnection, error) {
+func CommentCon(source []bug.Comment, edgeMaker CommentEdgeMaker, conMaker CommentConMaker, input models.ConnectionInput) (*models.CommentConnection, error) {
var nodes []bug.Comment
var edges []models.CommentEdge
var cursors []string
diff --git a/graphql/connections/gen_identity.go b/graphql/connections/gen_identity.go
index 2ba2f98f..6c1e7137 100644
--- a/graphql/connections/gen_identity.go
+++ b/graphql/connections/gen_identity.go
@@ -20,10 +20,10 @@ type IdentityConMaker func(
edges []models.IdentityEdge,
nodes []identity.Interface,
info models.PageInfo,
- totalCount int) (models.IdentityConnection, error)
+ totalCount int) (*models.IdentityConnection, error)
// IdentityCon will paginate a source according to the input of a relay connection
-func IdentityCon(source []identity.Interface, edgeMaker IdentityEdgeMaker, conMaker IdentityConMaker, input models.ConnectionInput) (models.IdentityConnection, error) {
+func IdentityCon(source []identity.Interface, edgeMaker IdentityEdgeMaker, conMaker IdentityConMaker, input models.ConnectionInput) (*models.IdentityConnection, error) {
var nodes []identity.Interface
var edges []models.IdentityEdge
var cursors []string
diff --git a/graphql/connections/gen_lazy_bug.go b/graphql/connections/gen_lazy_bug.go
index ba0a65fa..e9da5cc7 100644
--- a/graphql/connections/gen_lazy_bug.go
+++ b/graphql/connections/gen_lazy_bug.go
@@ -19,10 +19,10 @@ type LazyBugConMaker func(
edges []LazyBugEdge,
nodes []string,
info models.PageInfo,
- totalCount int) (models.BugConnection, error)
+ totalCount int) (*models.BugConnection, error)
// LazyBugCon will paginate a source according to the input of a relay connection
-func LazyBugCon(source []string, edgeMaker LazyBugEdgeMaker, conMaker LazyBugConMaker, input models.ConnectionInput) (models.BugConnection, error) {
+func LazyBugCon(source []string, edgeMaker LazyBugEdgeMaker, conMaker LazyBugConMaker, input models.ConnectionInput) (*models.BugConnection, error) {
var nodes []string
var edges []LazyBugEdge
var cursors []string
diff --git a/graphql/connections/gen_lazy_identity.go b/graphql/connections/gen_lazy_identity.go
index 28501171..8bb2097d 100644
--- a/graphql/connections/gen_lazy_identity.go
+++ b/graphql/connections/gen_lazy_identity.go
@@ -19,10 +19,10 @@ type LazyIdentityConMaker func(
edges []LazyIdentityEdge,
nodes []string,
info models.PageInfo,
- totalCount int) (models.IdentityConnection, error)
+ totalCount int) (*models.IdentityConnection, error)
// LazyIdentityCon will paginate a source according to the input of a relay connection
-func LazyIdentityCon(source []string, edgeMaker LazyIdentityEdgeMaker, conMaker LazyIdentityConMaker, input models.ConnectionInput) (models.IdentityConnection, error) {
+func LazyIdentityCon(source []string, edgeMaker LazyIdentityEdgeMaker, conMaker LazyIdentityConMaker, input models.ConnectionInput) (*models.IdentityConnection, error) {
var nodes []string
var edges []LazyIdentityEdge
var cursors []string
diff --git a/graphql/connections/gen_operation.go b/graphql/connections/gen_operation.go
index 3fbd724f..f9273f06 100644
--- a/graphql/connections/gen_operation.go
+++ b/graphql/connections/gen_operation.go
@@ -20,10 +20,10 @@ type OperationConMaker func(
edges []models.OperationEdge,
nodes []bug.Operation,
info models.PageInfo,
- totalCount int) (models.OperationConnection, error)
+ totalCount int) (*models.OperationConnection, error)
// OperationCon will paginate a source according to the input of a relay connection
-func OperationCon(source []bug.Operation, edgeMaker OperationEdgeMaker, conMaker OperationConMaker, input models.ConnectionInput) (models.OperationConnection, error) {
+func OperationCon(source []bug.Operation, edgeMaker OperationEdgeMaker, conMaker OperationConMaker, input models.ConnectionInput) (*models.OperationConnection, error) {
var nodes []bug.Operation
var edges []models.OperationEdge
var cursors []string
diff --git a/graphql/connections/gen_timeline.go b/graphql/connections/gen_timeline.go
index 1a4b2fe5..ad3fbf5b 100644
--- a/graphql/connections/gen_timeline.go
+++ b/graphql/connections/gen_timeline.go
@@ -20,10 +20,10 @@ type TimelineItemConMaker func(
edges []models.TimelineItemEdge,
nodes []bug.TimelineItem,
info models.PageInfo,
- totalCount int) (models.TimelineItemConnection, error)
+ totalCount int) (*models.TimelineItemConnection, error)
// TimelineItemCon will paginate a source according to the input of a relay connection
-func TimelineItemCon(source []bug.TimelineItem, edgeMaker TimelineItemEdgeMaker, conMaker TimelineItemConMaker, input models.ConnectionInput) (models.TimelineItemConnection, error) {
+func TimelineItemCon(source []bug.TimelineItem, edgeMaker TimelineItemEdgeMaker, conMaker TimelineItemConMaker, input models.ConnectionInput) (*models.TimelineItemConnection, error) {
var nodes []bug.TimelineItem
var edges []models.TimelineItemEdge
var cursors []string
diff --git a/graphql/graph/gen_graph.go b/graphql/graph/gen_graph.go
index 9c0003f2..516a474b 100644
--- a/graphql/graph/gen_graph.go
+++ b/graphql/graph/gen_graph.go
@@ -21,6 +21,8 @@ import (
"github.com/vektah/gqlparser/ast"
)
+// region ************************** generated!.gotpl **************************
+
// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
return &executableSchema{
@@ -61,39 +63,39 @@ type DirectiveRoot struct {
type ComplexityRoot struct {
AddCommentOperation struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
- Message func(childComplexity int) int
Files func(childComplexity int) int
+ Hash func(childComplexity int) int
+ Message func(childComplexity int) int
}
AddCommentTimelineItem struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
- Message func(childComplexity int) int
- MessageIsEmpty func(childComplexity int) int
- Files func(childComplexity int) int
CreatedAt func(childComplexity int) int
- LastEdit func(childComplexity int) int
Edited func(childComplexity int) int
+ Files func(childComplexity int) int
+ Hash func(childComplexity int) int
History func(childComplexity int) int
+ LastEdit func(childComplexity int) int
+ Message func(childComplexity int) int
+ MessageIsEmpty func(childComplexity int) int
}
Bug struct {
- Id func(childComplexity int) int
- HumanId func(childComplexity int) int
- Status func(childComplexity int) int
- Title func(childComplexity int) int
- Labels func(childComplexity int) int
+ Actors func(childComplexity int, after *string, before *string, first *int, last *int) int
Author func(childComplexity int) int
+ Comments func(childComplexity int, after *string, before *string, first *int, last *int) int
CreatedAt func(childComplexity int) int
+ HumanId func(childComplexity int) int
+ Id func(childComplexity int) int
+ Labels func(childComplexity int) int
LastEdit func(childComplexity int) int
- Actors func(childComplexity int, after *string, before *string, first *int, last *int) int
+ Operations func(childComplexity int, after *string, before *string, first *int, last *int) int
Participants func(childComplexity int, after *string, before *string, first *int, last *int) int
- Comments func(childComplexity int, after *string, before *string, first *int, last *int) int
+ Status func(childComplexity int) int
Timeline func(childComplexity int, after *string, before *string, first *int, last *int) int
- Operations func(childComplexity int, after *string, before *string, first *int, last *int) int
+ Title func(childComplexity int) int
}
BugConnection struct {
@@ -110,8 +112,8 @@ type ComplexityRoot struct {
Comment struct {
Author func(childComplexity int) int
- Message func(childComplexity int) int
Files func(childComplexity int) int
+ Message func(childComplexity int) int
}
CommentConnection struct {
@@ -127,49 +129,49 @@ type ComplexityRoot struct {
}
CommentHistoryStep struct {
- Message func(childComplexity int) int
Date func(childComplexity int) int
+ Message func(childComplexity int) int
}
CreateOperation struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
- Title func(childComplexity int) int
- Message func(childComplexity int) int
Files func(childComplexity int) int
+ Hash func(childComplexity int) int
+ Message func(childComplexity int) int
+ Title func(childComplexity int) int
}
CreateTimelineItem struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
- Message func(childComplexity int) int
- MessageIsEmpty func(childComplexity int) int
- Files func(childComplexity int) int
CreatedAt func(childComplexity int) int
- LastEdit func(childComplexity int) int
Edited func(childComplexity int) int
+ Files func(childComplexity int) int
+ Hash func(childComplexity int) int
History func(childComplexity int) int
+ LastEdit func(childComplexity int) int
+ Message func(childComplexity int) int
+ MessageIsEmpty func(childComplexity int) int
}
EditCommentOperation struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
- Target func(childComplexity int) int
- Message func(childComplexity int) int
Files func(childComplexity int) int
+ Hash func(childComplexity int) int
+ Message func(childComplexity int) int
+ Target func(childComplexity int) int
}
Identity struct {
- Id func(childComplexity int) int
- HumanId func(childComplexity int) int
- Name func(childComplexity int) int
- Email func(childComplexity int) int
- Login func(childComplexity int) int
+ AvatarURL func(childComplexity int) int
DisplayName func(childComplexity int) int
- AvatarUrl func(childComplexity int) int
+ Email func(childComplexity int) int
+ HumanID func(childComplexity int) int
+ ID func(childComplexity int) int
IsProtected func(childComplexity int) int
+ Login func(childComplexity int) int
+ Name func(childComplexity int) int
}
IdentityConnection struct {
@@ -185,29 +187,29 @@ type ComplexityRoot struct {
}
LabelChangeOperation struct {
- Hash func(childComplexity int) int
+ Added func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
- Added func(childComplexity int) int
+ Hash func(childComplexity int) int
Removed func(childComplexity int) int
}
LabelChangeTimelineItem struct {
- Hash func(childComplexity int) int
+ Added func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
- Added func(childComplexity int) int
+ Hash func(childComplexity int) int
Removed func(childComplexity int) int
}
Mutation struct {
- NewBug func(childComplexity int, repoRef *string, title string, message string, files []git.Hash) int
AddComment func(childComplexity int, repoRef *string, prefix string, message string, files []git.Hash) int
ChangeLabels func(childComplexity int, repoRef *string, prefix string, added []string, removed []string) int
- Open func(childComplexity int, repoRef *string, prefix string) int
Close func(childComplexity int, repoRef *string, prefix string) int
- SetTitle func(childComplexity int, repoRef *string, prefix string, title string) int
Commit func(childComplexity int, repoRef *string, prefix string) int
+ NewBug func(childComplexity int, repoRef *string, title string, message string, files []git.Hash) int
+ Open func(childComplexity int, repoRef *string, prefix string) int
+ SetTitle func(childComplexity int, repoRef *string, prefix string, title string) int
}
OperationConnection struct {
@@ -223,10 +225,10 @@ type ComplexityRoot struct {
}
PageInfo struct {
+ EndCursor func(childComplexity int) int
HasNextPage func(childComplexity int) int
HasPreviousPage func(childComplexity int) int
StartCursor func(childComplexity int) int
- EndCursor func(childComplexity int) int
}
Query struct {
@@ -236,38 +238,38 @@ type ComplexityRoot struct {
Repository struct {
AllBugs func(childComplexity int, after *string, before *string, first *int, last *int, query *string) int
- Bug func(childComplexity int, prefix string) int
AllIdentities func(childComplexity int, after *string, before *string, first *int, last *int) int
+ Bug func(childComplexity int, prefix string) int
Identity func(childComplexity int, prefix string) int
UserIdentity func(childComplexity int) int
}
SetStatusOperation struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
+ Hash func(childComplexity int) int
Status func(childComplexity int) int
}
SetStatusTimelineItem struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
+ Hash func(childComplexity int) int
Status func(childComplexity int) int
}
SetTitleOperation struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
+ Hash func(childComplexity int) int
Title func(childComplexity int) int
Was func(childComplexity int) int
}
SetTitleTimelineItem struct {
- Hash func(childComplexity int) int
Author func(childComplexity int) int
Date func(childComplexity int) int
+ Hash func(childComplexity int) int
Title func(childComplexity int) int
Was func(childComplexity int) int
}
@@ -286,34 +288,34 @@ type ComplexityRoot struct {
}
type AddCommentOperationResolver interface {
- Date(ctx context.Context, obj *bug.AddCommentOperation) (time.Time, error)
+ Date(ctx context.Context, obj *bug.AddCommentOperation) (*time.Time, error)
}
type AddCommentTimelineItemResolver interface {
- CreatedAt(ctx context.Context, obj *bug.AddCommentTimelineItem) (time.Time, error)
- LastEdit(ctx context.Context, obj *bug.AddCommentTimelineItem) (time.Time, error)
+ CreatedAt(ctx context.Context, obj *bug.AddCommentTimelineItem) (*time.Time, error)
+ LastEdit(ctx context.Context, obj *bug.AddCommentTimelineItem) (*time.Time, error)
}
type BugResolver interface {
Status(ctx context.Context, obj *bug.Snapshot) (models.Status, error)
- LastEdit(ctx context.Context, obj *bug.Snapshot) (time.Time, error)
- Actors(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.IdentityConnection, error)
- Participants(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.IdentityConnection, error)
- Comments(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.CommentConnection, error)
- Timeline(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.TimelineItemConnection, error)
- Operations(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.OperationConnection, error)
+ LastEdit(ctx context.Context, obj *bug.Snapshot) (*time.Time, error)
+ Actors(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.IdentityConnection, error)
+ Participants(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.IdentityConnection, error)
+ Comments(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.CommentConnection, error)
+ Timeline(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.TimelineItemConnection, error)
+ Operations(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.OperationConnection, error)
}
type CommentHistoryStepResolver interface {
- Date(ctx context.Context, obj *bug.CommentHistoryStep) (time.Time, error)
+ Date(ctx context.Context, obj *bug.CommentHistoryStep) (*time.Time, error)
}
type CreateOperationResolver interface {
- Date(ctx context.Context, obj *bug.CreateOperation) (time.Time, error)
+ Date(ctx context.Context, obj *bug.CreateOperation) (*time.Time, error)
}
type CreateTimelineItemResolver interface {
- CreatedAt(ctx context.Context, obj *bug.CreateTimelineItem) (time.Time, error)
- LastEdit(ctx context.Context, obj *bug.CreateTimelineItem) (time.Time, error)
+ CreatedAt(ctx context.Context, obj *bug.CreateTimelineItem) (*time.Time, error)
+ LastEdit(ctx context.Context, obj *bug.CreateTimelineItem) (*time.Time, error)
}
type EditCommentOperationResolver interface {
- Date(ctx context.Context, obj *bug.EditCommentOperation) (time.Time, error)
+ Date(ctx context.Context, obj *bug.EditCommentOperation) (*time.Time, error)
}
type IdentityResolver interface {
ID(ctx context.Context, obj *identity.Interface) (string, error)
@@ -326,892 +328,44 @@ type IdentityResolver interface {
IsProtected(ctx context.Context, obj *identity.Interface) (bool, error)
}
type LabelChangeOperationResolver interface {
- Date(ctx context.Context, obj *bug.LabelChangeOperation) (time.Time, error)
+ Date(ctx context.Context, obj *bug.LabelChangeOperation) (*time.Time, error)
}
type LabelChangeTimelineItemResolver interface {
- Date(ctx context.Context, obj *bug.LabelChangeTimelineItem) (time.Time, error)
+ Date(ctx context.Context, obj *bug.LabelChangeTimelineItem) (*time.Time, error)
}
type MutationResolver interface {
- NewBug(ctx context.Context, repoRef *string, title string, message string, files []git.Hash) (bug.Snapshot, error)
- AddComment(ctx context.Context, repoRef *string, prefix string, message string, files []git.Hash) (bug.Snapshot, error)
- ChangeLabels(ctx context.Context, repoRef *string, prefix string, added []string, removed []string) (bug.Snapshot, error)
- Open(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error)
- Close(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error)
- SetTitle(ctx context.Context, repoRef *string, prefix string, title string) (bug.Snapshot, error)
- Commit(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error)
+ NewBug(ctx context.Context, repoRef *string, title string, message string, files []git.Hash) (*bug.Snapshot, error)
+ AddComment(ctx context.Context, repoRef *string, prefix string, message string, files []git.Hash) (*bug.Snapshot, error)
+ ChangeLabels(ctx context.Context, repoRef *string, prefix string, added []string, removed []string) (*bug.Snapshot, error)
+ Open(ctx context.Context, repoRef *string, prefix string) (*bug.Snapshot, error)
+ Close(ctx context.Context, repoRef *string, prefix string) (*bug.Snapshot, error)
+ SetTitle(ctx context.Context, repoRef *string, prefix string, title string) (*bug.Snapshot, error)
+ Commit(ctx context.Context, repoRef *string, prefix string) (*bug.Snapshot, error)
}
type QueryResolver interface {
DefaultRepository(ctx context.Context) (*models.Repository, error)
Repository(ctx context.Context, id string) (*models.Repository, error)
}
type RepositoryResolver interface {
- AllBugs(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int, query *string) (models.BugConnection, error)
+ AllBugs(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int, query *string) (*models.BugConnection, error)
Bug(ctx context.Context, obj *models.Repository, prefix string) (*bug.Snapshot, error)
- AllIdentities(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int) (models.IdentityConnection, error)
- Identity(ctx context.Context, obj *models.Repository, prefix string) (*identity.Interface, error)
- UserIdentity(ctx context.Context, obj *models.Repository) (*identity.Interface, error)
+ AllIdentities(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int) (*models.IdentityConnection, error)
+ Identity(ctx context.Context, obj *models.Repository, prefix string) (identity.Interface, error)
+ UserIdentity(ctx context.Context, obj *models.Repository) (identity.Interface, error)
}
type SetStatusOperationResolver interface {
- Date(ctx context.Context, obj *bug.SetStatusOperation) (time.Time, error)
+ Date(ctx context.Context, obj *bug.SetStatusOperation) (*time.Time, error)
Status(ctx context.Context, obj *bug.SetStatusOperation) (models.Status, error)
}
type SetStatusTimelineItemResolver interface {
- Date(ctx context.Context, obj *bug.SetStatusTimelineItem) (time.Time, error)
+ Date(ctx context.Context, obj *bug.SetStatusTimelineItem) (*time.Time, error)
Status(ctx context.Context, obj *bug.SetStatusTimelineItem) (models.Status, error)
}
type SetTitleOperationResolver interface {
- Date(ctx context.Context, obj *bug.SetTitleOperation) (time.Time, error)
+ Date(ctx context.Context, obj *bug.SetTitleOperation) (*time.Time, error)
}
type SetTitleTimelineItemResolver interface {
- Date(ctx context.Context, obj *bug.SetTitleTimelineItem) (time.Time, error)
-}
-
-func field_Bug_actors_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := rawArgs["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := rawArgs["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := rawArgs["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["last"] = arg3
- return args, nil
-
-}
-
-func field_Bug_participants_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := rawArgs["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := rawArgs["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := rawArgs["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["last"] = arg3
- return args, nil
-
-}
-
-func field_Bug_comments_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := rawArgs["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := rawArgs["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := rawArgs["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["last"] = arg3
- return args, nil
-
-}
-
-func field_Bug_timeline_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := rawArgs["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := rawArgs["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := rawArgs["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["last"] = arg3
- return args, nil
-
-}
-
-func field_Bug_operations_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := rawArgs["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := rawArgs["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := rawArgs["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["last"] = arg3
- return args, nil
-
-}
-
-func field_Mutation_newBug_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := rawArgs["title"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["title"] = arg1
- var arg2 string
- if tmp, ok := rawArgs["message"]; ok {
- var err error
- arg2, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["message"] = arg2
- var arg3 []git.Hash
- if tmp, ok := rawArgs["files"]; ok {
- var err error
- var rawIf1 []interface{}
- if tmp != nil {
- if tmp1, ok := tmp.([]interface{}); ok {
- rawIf1 = tmp1
- } else {
- rawIf1 = []interface{}{tmp}
- }
- }
- arg3 = make([]git.Hash, len(rawIf1))
- for idx1 := range rawIf1 {
- err = (&arg3[idx1]).UnmarshalGQL(rawIf1[idx1])
- }
- if err != nil {
- return nil, err
- }
- }
- args["files"] = arg3
- return args, nil
-
-}
-
-func field_Mutation_addComment_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := rawArgs["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["prefix"] = arg1
- var arg2 string
- if tmp, ok := rawArgs["message"]; ok {
- var err error
- arg2, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["message"] = arg2
- var arg3 []git.Hash
- if tmp, ok := rawArgs["files"]; ok {
- var err error
- var rawIf1 []interface{}
- if tmp != nil {
- if tmp1, ok := tmp.([]interface{}); ok {
- rawIf1 = tmp1
- } else {
- rawIf1 = []interface{}{tmp}
- }
- }
- arg3 = make([]git.Hash, len(rawIf1))
- for idx1 := range rawIf1 {
- err = (&arg3[idx1]).UnmarshalGQL(rawIf1[idx1])
- }
- if err != nil {
- return nil, err
- }
- }
- args["files"] = arg3
- return args, nil
-
-}
-
-func field_Mutation_changeLabels_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := rawArgs["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["prefix"] = arg1
- var arg2 []string
- if tmp, ok := rawArgs["added"]; ok {
- var err error
- var rawIf1 []interface{}
- if tmp != nil {
- if tmp1, ok := tmp.([]interface{}); ok {
- rawIf1 = tmp1
- } else {
- rawIf1 = []interface{}{tmp}
- }
- }
- arg2 = make([]string, len(rawIf1))
- for idx1 := range rawIf1 {
- arg2[idx1], err = graphql.UnmarshalString(rawIf1[idx1])
- }
- if err != nil {
- return nil, err
- }
- }
- args["added"] = arg2
- var arg3 []string
- if tmp, ok := rawArgs["removed"]; ok {
- var err error
- var rawIf1 []interface{}
- if tmp != nil {
- if tmp1, ok := tmp.([]interface{}); ok {
- rawIf1 = tmp1
- } else {
- rawIf1 = []interface{}{tmp}
- }
- }
- arg3 = make([]string, len(rawIf1))
- for idx1 := range rawIf1 {
- arg3[idx1], err = graphql.UnmarshalString(rawIf1[idx1])
- }
- if err != nil {
- return nil, err
- }
- }
- args["removed"] = arg3
- return args, nil
-
-}
-
-func field_Mutation_open_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := rawArgs["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["prefix"] = arg1
- return args, nil
-
-}
-
-func field_Mutation_close_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := rawArgs["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["prefix"] = arg1
- return args, nil
-
-}
-
-func field_Mutation_setTitle_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := rawArgs["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["prefix"] = arg1
- var arg2 string
- if tmp, ok := rawArgs["title"]; ok {
- var err error
- arg2, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["title"] = arg2
- return args, nil
-
-}
-
-func field_Mutation_commit_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["repoRef"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["repoRef"] = arg0
- var arg1 string
- if tmp, ok := rawArgs["prefix"]; ok {
- var err error
- arg1, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["prefix"] = arg1
- return args, nil
-
-}
-
-func field_Query_repository_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 string
- if tmp, ok := rawArgs["id"]; ok {
- var err error
- arg0, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["id"] = arg0
- return args, nil
-
-}
-
-func field_Query___type_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 string
- if tmp, ok := rawArgs["name"]; ok {
- var err error
- arg0, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["name"] = arg0
- return args, nil
-
-}
-
-func field_Repository_allBugs_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := rawArgs["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := rawArgs["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := rawArgs["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["last"] = arg3
- var arg4 *string
- if tmp, ok := rawArgs["query"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg4 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["query"] = arg4
- return args, nil
-
-}
-
-func field_Repository_bug_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 string
- if tmp, ok := rawArgs["prefix"]; ok {
- var err error
- arg0, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["prefix"] = arg0
- return args, nil
-
-}
-
-func field_Repository_allIdentities_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 *string
- if tmp, ok := rawArgs["after"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg0 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["after"] = arg0
- var arg1 *string
- if tmp, ok := rawArgs["before"]; ok {
- var err error
- var ptr1 string
- if tmp != nil {
- ptr1, err = graphql.UnmarshalString(tmp)
- arg1 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["before"] = arg1
- var arg2 *int
- if tmp, ok := rawArgs["first"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg2 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["first"] = arg2
- var arg3 *int
- if tmp, ok := rawArgs["last"]; ok {
- var err error
- var ptr1 int
- if tmp != nil {
- ptr1, err = graphql.UnmarshalInt(tmp)
- arg3 = &ptr1
- }
-
- if err != nil {
- return nil, err
- }
- }
- args["last"] = arg3
- return args, nil
-
-}
-
-func field_Repository_identity_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 string
- if tmp, ok := rawArgs["prefix"]; ok {
- var err error
- arg0, err = graphql.UnmarshalString(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["prefix"] = arg0
- return args, nil
-
-}
-
-func field___Type_fields_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 bool
- if tmp, ok := rawArgs["includeDeprecated"]; ok {
- var err error
- arg0, err = graphql.UnmarshalBoolean(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["includeDeprecated"] = arg0
- return args, nil
-
-}
-
-func field___Type_enumValues_args(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- args := map[string]interface{}{}
- var arg0 bool
- if tmp, ok := rawArgs["includeDeprecated"]; ok {
- var err error
- arg0, err = graphql.UnmarshalBoolean(tmp)
- if err != nil {
- return nil, err
- }
- }
- args["includeDeprecated"] = arg0
- return args, nil
-
+ Date(ctx context.Context, obj *bug.SetTitleTimelineItem) (*time.Time, error)
}
type executableSchema struct {
@@ -1225,1046 +379,1048 @@ func (e *executableSchema) Schema() *ast.Schema {
}
func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
+ ec := executionContext{nil, e}
+ _ = ec
switch typeName + "." + field {
- case "AddCommentOperation.hash":
- if e.complexity.AddCommentOperation.Hash == nil {
- break
- }
-
- return e.complexity.AddCommentOperation.Hash(childComplexity), true
-
- case "AddCommentOperation.author":
+ case "AddCommentOperation.Author":
if e.complexity.AddCommentOperation.Author == nil {
break
}
return e.complexity.AddCommentOperation.Author(childComplexity), true
- case "AddCommentOperation.date":
+ case "AddCommentOperation.Date":
if e.complexity.AddCommentOperation.Date == nil {
break
}
return e.complexity.AddCommentOperation.Date(childComplexity), true
- case "AddCommentOperation.message":
- if e.complexity.AddCommentOperation.Message == nil {
+ case "AddCommentOperation.Files":
+ if e.complexity.AddCommentOperation.Files == nil {
break
}
- return e.complexity.AddCommentOperation.Message(childComplexity), true
+ return e.complexity.AddCommentOperation.Files(childComplexity), true
- case "AddCommentOperation.files":
- if e.complexity.AddCommentOperation.Files == nil {
+ case "AddCommentOperation.Hash":
+ if e.complexity.AddCommentOperation.Hash == nil {
break
}
- return e.complexity.AddCommentOperation.Files(childComplexity), true
+ return e.complexity.AddCommentOperation.Hash(childComplexity), true
- case "AddCommentTimelineItem.hash":
- if e.complexity.AddCommentTimelineItem.Hash == nil {
+ case "AddCommentOperation.Message":
+ if e.complexity.AddCommentOperation.Message == nil {
break
}
- return e.complexity.AddCommentTimelineItem.Hash(childComplexity), true
+ return e.complexity.AddCommentOperation.Message(childComplexity), true
- case "AddCommentTimelineItem.author":
+ case "AddCommentTimelineItem.Author":
if e.complexity.AddCommentTimelineItem.Author == nil {
break
}
return e.complexity.AddCommentTimelineItem.Author(childComplexity), true
- case "AddCommentTimelineItem.message":
- if e.complexity.AddCommentTimelineItem.Message == nil {
+ case "AddCommentTimelineItem.CreatedAt":
+ if e.complexity.AddCommentTimelineItem.CreatedAt == nil {
break
}
- return e.complexity.AddCommentTimelineItem.Message(childComplexity), true
+ return e.complexity.AddCommentTimelineItem.CreatedAt(childComplexity), true
- case "AddCommentTimelineItem.messageIsEmpty":
- if e.complexity.AddCommentTimelineItem.MessageIsEmpty == nil {
+ case "AddCommentTimelineItem.Edited":
+ if e.complexity.AddCommentTimelineItem.Edited == nil {
break
}
- return e.complexity.AddCommentTimelineItem.MessageIsEmpty(childComplexity), true
+ return e.complexity.AddCommentTimelineItem.Edited(childComplexity), true
- case "AddCommentTimelineItem.files":
+ case "AddCommentTimelineItem.Files":
if e.complexity.AddCommentTimelineItem.Files == nil {
break
}
return e.complexity.AddCommentTimelineItem.Files(childComplexity), true
- case "AddCommentTimelineItem.createdAt":
- if e.complexity.AddCommentTimelineItem.CreatedAt == nil {
+ case "AddCommentTimelineItem.Hash":
+ if e.complexity.AddCommentTimelineItem.Hash == nil {
break
}
- return e.complexity.AddCommentTimelineItem.CreatedAt(childComplexity), true
+ return e.complexity.AddCommentTimelineItem.Hash(childComplexity), true
- case "AddCommentTimelineItem.lastEdit":
+ case "AddCommentTimelineItem.History":
+ if e.complexity.AddCommentTimelineItem.History == nil {
+ break
+ }
+
+ return e.complexity.AddCommentTimelineItem.History(childComplexity), true
+
+ case "AddCommentTimelineItem.LastEdit":
if e.complexity.AddCommentTimelineItem.LastEdit == nil {
break
}
return e.complexity.AddCommentTimelineItem.LastEdit(childComplexity), true
- case "AddCommentTimelineItem.edited":
- if e.complexity.AddCommentTimelineItem.Edited == nil {
+ case "AddCommentTimelineItem.Message":
+ if e.complexity.AddCommentTimelineItem.Message == nil {
break
}
- return e.complexity.AddCommentTimelineItem.Edited(childComplexity), true
+ return e.complexity.AddCommentTimelineItem.Message(childComplexity), true
- case "AddCommentTimelineItem.history":
- if e.complexity.AddCommentTimelineItem.History == nil {
+ case "AddCommentTimelineItem.MessageIsEmpty":
+ if e.complexity.AddCommentTimelineItem.MessageIsEmpty == nil {
break
}
- return e.complexity.AddCommentTimelineItem.History(childComplexity), true
+ return e.complexity.AddCommentTimelineItem.MessageIsEmpty(childComplexity), true
- case "Bug.id":
- if e.complexity.Bug.Id == nil {
+ case "Bug.Actors":
+ if e.complexity.Bug.Actors == nil {
break
}
- return e.complexity.Bug.Id(childComplexity), true
+ args, err := ec.field_Bug_actors_args(context.TODO(), rawArgs)
+ if err != nil {
+ return 0, false
+ }
- case "Bug.humanId":
- if e.complexity.Bug.HumanId == nil {
+ return e.complexity.Bug.Actors(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
+
+ case "Bug.Author":
+ if e.complexity.Bug.Author == nil {
break
}
- return e.complexity.Bug.HumanId(childComplexity), true
+ return e.complexity.Bug.Author(childComplexity), true
- case "Bug.status":
- if e.complexity.Bug.Status == nil {
+ case "Bug.Comments":
+ if e.complexity.Bug.Comments == nil {
break
}
- return e.complexity.Bug.Status(childComplexity), true
+ args, err := ec.field_Bug_comments_args(context.TODO(), rawArgs)
+ if err != nil {
+ return 0, false
+ }
- case "Bug.title":
- if e.complexity.Bug.Title == nil {
+ return e.complexity.Bug.Comments(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
+
+ case "Bug.CreatedAt":
+ if e.complexity.Bug.CreatedAt == nil {
break
}
- return e.complexity.Bug.Title(childComplexity), true
+ return e.complexity.Bug.CreatedAt(childComplexity), true
- case "Bug.labels":
- if e.complexity.Bug.Labels == nil {
+ case "Bug.HumanId":
+ if e.complexity.Bug.HumanId == nil {
break
}
- return e.complexity.Bug.Labels(childComplexity), true
+ return e.complexity.Bug.HumanId(childComplexity), true
- case "Bug.author":
- if e.complexity.Bug.Author == nil {
+ case "Bug.Id":
+ if e.complexity.Bug.Id == nil {
break
}
- return e.complexity.Bug.Author(childComplexity), true
+ return e.complexity.Bug.Id(childComplexity), true
- case "Bug.createdAt":
- if e.complexity.Bug.CreatedAt == nil {
+ case "Bug.Labels":
+ if e.complexity.Bug.Labels == nil {
break
}
- return e.complexity.Bug.CreatedAt(childComplexity), true
+ return e.complexity.Bug.Labels(childComplexity), true
- case "Bug.lastEdit":
+ case "Bug.LastEdit":
if e.complexity.Bug.LastEdit == nil {
break
}
return e.complexity.Bug.LastEdit(childComplexity), true
- case "Bug.actors":
- if e.complexity.Bug.Actors == nil {
+ case "Bug.Operations":
+ if e.complexity.Bug.Operations == nil {
break
}
- args, err := field_Bug_actors_args(rawArgs)
+ args, err := ec.field_Bug_operations_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Bug.Actors(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
+ return e.complexity.Bug.Operations(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
- case "Bug.participants":
+ case "Bug.Participants":
if e.complexity.Bug.Participants == nil {
break
}
- args, err := field_Bug_participants_args(rawArgs)
+ args, err := ec.field_Bug_participants_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Bug.Participants(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
- case "Bug.comments":
- if e.complexity.Bug.Comments == nil {
+ case "Bug.Status":
+ if e.complexity.Bug.Status == nil {
break
}
- args, err := field_Bug_comments_args(rawArgs)
- if err != nil {
- return 0, false
- }
-
- return e.complexity.Bug.Comments(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
+ return e.complexity.Bug.Status(childComplexity), true
- case "Bug.timeline":
+ case "Bug.Timeline":
if e.complexity.Bug.Timeline == nil {
break
}
- args, err := field_Bug_timeline_args(rawArgs)
+ args, err := ec.field_Bug_timeline_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Bug.Timeline(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
- case "Bug.operations":
- if e.complexity.Bug.Operations == nil {
+ case "Bug.Title":
+ if e.complexity.Bug.Title == nil {
break
}
- args, err := field_Bug_operations_args(rawArgs)
- if err != nil {
- return 0, false
- }
-
- return e.complexity.Bug.Operations(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
+ return e.complexity.Bug.Title(childComplexity), true
- case "BugConnection.edges":
+ case "BugConnection.Edges":
if e.complexity.BugConnection.Edges == nil {
break
}
return e.complexity.BugConnection.Edges(childComplexity), true
- case "BugConnection.nodes":
+ case "BugConnection.Nodes":
if e.complexity.BugConnection.Nodes == nil {
break
}
return e.complexity.BugConnection.Nodes(childComplexity), true
- case "BugConnection.pageInfo":
+ case "BugConnection.PageInfo":
if e.complexity.BugConnection.PageInfo == nil {
break
}
return e.complexity.BugConnection.PageInfo(childComplexity), true
- case "BugConnection.totalCount":
+ case "BugConnection.TotalCount":
if e.complexity.BugConnection.TotalCount == nil {
break
}
return e.complexity.BugConnection.TotalCount(childComplexity), true
- case "BugEdge.cursor":
+ case "BugEdge.Cursor":
if e.complexity.BugEdge.Cursor == nil {
break
}
return e.complexity.BugEdge.Cursor(childComplexity), true
- case "BugEdge.node":
+ case "BugEdge.Node":
if e.complexity.BugEdge.Node == nil {
break
}
return e.complexity.BugEdge.Node(childComplexity), true
- case "Comment.author":
+ case "Comment.Author":
if e.complexity.Comment.Author == nil {
break
}
return e.complexity.Comment.Author(childComplexity), true
- case "Comment.message":
- if e.complexity.Comment.Message == nil {
+ case "Comment.Files":
+ if e.complexity.Comment.Files == nil {
break
}
- return e.complexity.Comment.Message(childComplexity), true
+ return e.complexity.Comment.Files(childComplexity), true
- case "Comment.files":
- if e.complexity.Comment.Files == nil {
+ case "Comment.Message":
+ if e.complexity.Comment.Message == nil {
break
}
- return e.complexity.Comment.Files(childComplexity), true
+ return e.complexity.Comment.Message(childComplexity), true
- case "CommentConnection.edges":
+ case "CommentConnection.Edges":
if e.complexity.CommentConnection.Edges == nil {
break
}
return e.complexity.CommentConnection.Edges(childComplexity), true
- case "CommentConnection.nodes":
+ case "CommentConnection.Nodes":
if e.complexity.CommentConnection.Nodes == nil {
break
}
return e.complexity.CommentConnection.Nodes(childComplexity), true
- case "CommentConnection.pageInfo":
+ case "CommentConnection.PageInfo":
if e.complexity.CommentConnection.PageInfo == nil {
break
}
return e.complexity.CommentConnection.PageInfo(childComplexity), true
- case "CommentConnection.totalCount":
+ case "CommentConnection.TotalCount":
if e.complexity.CommentConnection.TotalCount == nil {
break
}
return e.complexity.CommentConnection.TotalCount(childComplexity), true
- case "CommentEdge.cursor":
+ case "CommentEdge.Cursor":
if e.complexity.CommentEdge.Cursor == nil {
break
}
return e.complexity.CommentEdge.Cursor(childComplexity), true
- case "CommentEdge.node":
+ case "CommentEdge.Node":
if e.complexity.CommentEdge.Node == nil {
break
}
return e.complexity.CommentEdge.Node(childComplexity), true
- case "CommentHistoryStep.message":
- if e.complexity.CommentHistoryStep.Message == nil {
- break
- }
-
- return e.complexity.CommentHistoryStep.Message(childComplexity), true
-
- case "CommentHistoryStep.date":
+ case "CommentHistoryStep.Date":
if e.complexity.CommentHistoryStep.Date == nil {
break
}
return e.complexity.CommentHistoryStep.Date(childComplexity), true
- case "CreateOperation.hash":
- if e.complexity.CreateOperation.Hash == nil {
+ case "CommentHistoryStep.Message":
+ if e.complexity.CommentHistoryStep.Message == nil {
break
}
- return e.complexity.CreateOperation.Hash(childComplexity), true
+ return e.complexity.CommentHistoryStep.Message(childComplexity), true
- case "CreateOperation.author":
+ case "CreateOperation.Author":
if e.complexity.CreateOperation.Author == nil {
break
}
return e.complexity.CreateOperation.Author(childComplexity), true
- case "CreateOperation.date":
+ case "CreateOperation.Date":
if e.complexity.CreateOperation.Date == nil {
break
}
return e.complexity.CreateOperation.Date(childComplexity), true
- case "CreateOperation.title":
- if e.complexity.CreateOperation.Title == nil {
+ case "CreateOperation.Files":
+ if e.complexity.CreateOperation.Files == nil {
break
}
- return e.complexity.CreateOperation.Title(childComplexity), true
+ return e.complexity.CreateOperation.Files(childComplexity), true
- case "CreateOperation.message":
- if e.complexity.CreateOperation.Message == nil {
+ case "CreateOperation.Hash":
+ if e.complexity.CreateOperation.Hash == nil {
break
}
- return e.complexity.CreateOperation.Message(childComplexity), true
+ return e.complexity.CreateOperation.Hash(childComplexity), true
- case "CreateOperation.files":
- if e.complexity.CreateOperation.Files == nil {
+ case "CreateOperation.Message":
+ if e.complexity.CreateOperation.Message == nil {
break
}
- return e.complexity.CreateOperation.Files(childComplexity), true
+ return e.complexity.CreateOperation.Message(childComplexity), true
- case "CreateTimelineItem.hash":
- if e.complexity.CreateTimelineItem.Hash == nil {
+ case "CreateOperation.Title":
+ if e.complexity.CreateOperation.Title == nil {
break
}
- return e.complexity.CreateTimelineItem.Hash(childComplexity), true
+ return e.complexity.CreateOperation.Title(childComplexity), true
- case "CreateTimelineItem.author":
+ case "CreateTimelineItem.Author":
if e.complexity.CreateTimelineItem.Author == nil {
break
}
return e.complexity.CreateTimelineItem.Author(childComplexity), true
- case "CreateTimelineItem.message":
- if e.complexity.CreateTimelineItem.Message == nil {
+ case "CreateTimelineItem.CreatedAt":
+ if e.complexity.CreateTimelineItem.CreatedAt == nil {
break
}
- return e.complexity.CreateTimelineItem.Message(childComplexity), true
+ return e.complexity.CreateTimelineItem.CreatedAt(childComplexity), true
- case "CreateTimelineItem.messageIsEmpty":
- if e.complexity.CreateTimelineItem.MessageIsEmpty == nil {
+ case "CreateTimelineItem.Edited":
+ if e.complexity.CreateTimelineItem.Edited == nil {
break
}
- return e.complexity.CreateTimelineItem.MessageIsEmpty(childComplexity), true
+ return e.complexity.CreateTimelineItem.Edited(childComplexity), true
- case "CreateTimelineItem.files":
+ case "CreateTimelineItem.Files":
if e.complexity.CreateTimelineItem.Files == nil {
break
}
return e.complexity.CreateTimelineItem.Files(childComplexity), true
- case "CreateTimelineItem.createdAt":
- if e.complexity.CreateTimelineItem.CreatedAt == nil {
+ case "CreateTimelineItem.Hash":
+ if e.complexity.CreateTimelineItem.Hash == nil {
break
}
- return e.complexity.CreateTimelineItem.CreatedAt(childComplexity), true
+ return e.complexity.CreateTimelineItem.Hash(childComplexity), true
- case "CreateTimelineItem.lastEdit":
- if e.complexity.CreateTimelineItem.LastEdit == nil {
+ case "CreateTimelineItem.History":
+ if e.complexity.CreateTimelineItem.History == nil {
break
}
- return e.complexity.CreateTimelineItem.LastEdit(childComplexity), true
+ return e.complexity.CreateTimelineItem.History(childComplexity), true
- case "CreateTimelineItem.edited":
- if e.complexity.CreateTimelineItem.Edited == nil {
+ case "CreateTimelineItem.LastEdit":
+ if e.complexity.CreateTimelineItem.LastEdit == nil {
break
}
- return e.complexity.CreateTimelineItem.Edited(childComplexity), true
+ return e.complexity.CreateTimelineItem.LastEdit(childComplexity), true
- case "CreateTimelineItem.history":
- if e.complexity.CreateTimelineItem.History == nil {
+ case "CreateTimelineItem.Message":
+ if e.complexity.CreateTimelineItem.Message == nil {
break
}
- return e.complexity.CreateTimelineItem.History(childComplexity), true
+ return e.complexity.CreateTimelineItem.Message(childComplexity), true
- case "EditCommentOperation.hash":
- if e.complexity.EditCommentOperation.Hash == nil {
+ case "CreateTimelineItem.MessageIsEmpty":
+ if e.complexity.CreateTimelineItem.MessageIsEmpty == nil {
break
}
- return e.complexity.EditCommentOperation.Hash(childComplexity), true
+ return e.complexity.CreateTimelineItem.MessageIsEmpty(childComplexity), true
- case "EditCommentOperation.author":
+ case "EditCommentOperation.Author":
if e.complexity.EditCommentOperation.Author == nil {
break
}
return e.complexity.EditCommentOperation.Author(childComplexity), true
- case "EditCommentOperation.date":
+ case "EditCommentOperation.Date":
if e.complexity.EditCommentOperation.Date == nil {
break
}
return e.complexity.EditCommentOperation.Date(childComplexity), true
- case "EditCommentOperation.target":
- if e.complexity.EditCommentOperation.Target == nil {
+ case "EditCommentOperation.Files":
+ if e.complexity.EditCommentOperation.Files == nil {
break
}
- return e.complexity.EditCommentOperation.Target(childComplexity), true
+ return e.complexity.EditCommentOperation.Files(childComplexity), true
- case "EditCommentOperation.message":
- if e.complexity.EditCommentOperation.Message == nil {
+ case "EditCommentOperation.Hash":
+ if e.complexity.EditCommentOperation.Hash == nil {
break
}
- return e.complexity.EditCommentOperation.Message(childComplexity), true
+ return e.complexity.EditCommentOperation.Hash(childComplexity), true
- case "EditCommentOperation.files":
- if e.complexity.EditCommentOperation.Files == nil {
+ case "EditCommentOperation.Message":
+ if e.complexity.EditCommentOperation.Message == nil {
break
}
- return e.complexity.EditCommentOperation.Files(childComplexity), true
+ return e.complexity.EditCommentOperation.Message(childComplexity), true
- case "Identity.id":
- if e.complexity.Identity.Id == nil {
+ case "EditCommentOperation.Target":
+ if e.complexity.EditCommentOperation.Target == nil {
break
}
- return e.complexity.Identity.Id(childComplexity), true
+ return e.complexity.EditCommentOperation.Target(childComplexity), true
- case "Identity.humanId":
- if e.complexity.Identity.HumanId == nil {
+ case "Identity.AvatarURL":
+ if e.complexity.Identity.AvatarURL == nil {
break
}
- return e.complexity.Identity.HumanId(childComplexity), true
+ return e.complexity.Identity.AvatarURL(childComplexity), true
- case "Identity.name":
- if e.complexity.Identity.Name == nil {
+ case "Identity.DisplayName":
+ if e.complexity.Identity.DisplayName == nil {
break
}
- return e.complexity.Identity.Name(childComplexity), true
+ return e.complexity.Identity.DisplayName(childComplexity), true
- case "Identity.email":
+ case "Identity.Email":
if e.complexity.Identity.Email == nil {
break
}
return e.complexity.Identity.Email(childComplexity), true
- case "Identity.login":
- if e.complexity.Identity.Login == nil {
+ case "Identity.HumanID":
+ if e.complexity.Identity.HumanID == nil {
break
}
- return e.complexity.Identity.Login(childComplexity), true
+ return e.complexity.Identity.HumanID(childComplexity), true
- case "Identity.displayName":
- if e.complexity.Identity.DisplayName == nil {
+ case "Identity.ID":
+ if e.complexity.Identity.ID == nil {
break
}
- return e.complexity.Identity.DisplayName(childComplexity), true
+ return e.complexity.Identity.ID(childComplexity), true
- case "Identity.avatarUrl":
- if e.complexity.Identity.AvatarUrl == nil {
+ case "Identity.IsProtected":
+ if e.complexity.Identity.IsProtected == nil {
break
}
- return e.complexity.Identity.AvatarUrl(childComplexity), true
+ return e.complexity.Identity.IsProtected(childComplexity), true
- case "Identity.isProtected":
- if e.complexity.Identity.IsProtected == nil {
+ case "Identity.Login":
+ if e.complexity.Identity.Login == nil {
break
}
- return e.complexity.Identity.IsProtected(childComplexity), true
+ return e.complexity.Identity.Login(childComplexity), true
+
+ case "Identity.Name":
+ if e.complexity.Identity.Name == nil {
+ break
+ }
- case "IdentityConnection.edges":
+ return e.complexity.Identity.Name(childComplexity), true
+
+ case "IdentityConnection.Edges":
if e.complexity.IdentityConnection.Edges == nil {
break
}
return e.complexity.IdentityConnection.Edges(childComplexity), true
- case "IdentityConnection.nodes":
+ case "IdentityConnection.Nodes":
if e.complexity.IdentityConnection.Nodes == nil {
break
}
return e.complexity.IdentityConnection.Nodes(childComplexity), true
- case "IdentityConnection.pageInfo":
+ case "IdentityConnection.PageInfo":
if e.complexity.IdentityConnection.PageInfo == nil {
break
}
return e.complexity.IdentityConnection.PageInfo(childComplexity), true
- case "IdentityConnection.totalCount":
+ case "IdentityConnection.TotalCount":
if e.complexity.IdentityConnection.TotalCount == nil {
break
}
return e.complexity.IdentityConnection.TotalCount(childComplexity), true
- case "IdentityEdge.cursor":
+ case "IdentityEdge.Cursor":
if e.complexity.IdentityEdge.Cursor == nil {
break
}
return e.complexity.IdentityEdge.Cursor(childComplexity), true
- case "IdentityEdge.node":
+ case "IdentityEdge.Node":
if e.complexity.IdentityEdge.Node == nil {
break
}
return e.complexity.IdentityEdge.Node(childComplexity), true
- case "LabelChangeOperation.hash":
- if e.complexity.LabelChangeOperation.Hash == nil {
+ case "LabelChangeOperation.Added":
+ if e.complexity.LabelChangeOperation.Added == nil {
break
}
- return e.complexity.LabelChangeOperation.Hash(childComplexity), true
+ return e.complexity.LabelChangeOperation.Added(childComplexity), true
- case "LabelChangeOperation.author":
+ case "LabelChangeOperation.Author":
if e.complexity.LabelChangeOperation.Author == nil {
break
}
return e.complexity.LabelChangeOperation.Author(childComplexity), true
- case "LabelChangeOperation.date":
+ case "LabelChangeOperation.Date":
if e.complexity.LabelChangeOperation.Date == nil {
break
}
return e.complexity.LabelChangeOperation.Date(childComplexity), true
- case "LabelChangeOperation.added":
- if e.complexity.LabelChangeOperation.Added == nil {
+ case "LabelChangeOperation.Hash":
+ if e.complexity.LabelChangeOperation.Hash == nil {
break
}
- return e.complexity.LabelChangeOperation.Added(childComplexity), true
+ return e.complexity.LabelChangeOperation.Hash(childComplexity), true
- case "LabelChangeOperation.removed":
+ case "LabelChangeOperation.Removed":
if e.complexity.LabelChangeOperation.Removed == nil {
break
}
return e.complexity.LabelChangeOperation.Removed(childComplexity), true
- case "LabelChangeTimelineItem.hash":
- if e.complexity.LabelChangeTimelineItem.Hash == nil {
+ case "LabelChangeTimelineItem.Added":
+ if e.complexity.LabelChangeTimelineItem.Added == nil {
break
}
- return e.complexity.LabelChangeTimelineItem.Hash(childComplexity), true
+ return e.complexity.LabelChangeTimelineItem.Added(childComplexity), true
- case "LabelChangeTimelineItem.author":
+ case "LabelChangeTimelineItem.Author":
if e.complexity.LabelChangeTimelineItem.Author == nil {
break
}
return e.complexity.LabelChangeTimelineItem.Author(childComplexity), true
- case "LabelChangeTimelineItem.date":
+ case "LabelChangeTimelineItem.Date":
if e.complexity.LabelChangeTimelineItem.Date == nil {
break
}
return e.complexity.LabelChangeTimelineItem.Date(childComplexity), true
- case "LabelChangeTimelineItem.added":
- if e.complexity.LabelChangeTimelineItem.Added == nil {
+ case "LabelChangeTimelineItem.Hash":
+ if e.complexity.LabelChangeTimelineItem.Hash == nil {
break
}
- return e.complexity.LabelChangeTimelineItem.Added(childComplexity), true
+ return e.complexity.LabelChangeTimelineItem.Hash(childComplexity), true
- case "LabelChangeTimelineItem.removed":
+ case "LabelChangeTimelineItem.Removed":
if e.complexity.LabelChangeTimelineItem.Removed == nil {
break
}
return e.complexity.LabelChangeTimelineItem.Removed(childComplexity), true
- case "Mutation.newBug":
- if e.complexity.Mutation.NewBug == nil {
+ case "Mutation.AddComment":
+ if e.complexity.Mutation.AddComment == nil {
break
}
- args, err := field_Mutation_newBug_args(rawArgs)
+ args, err := ec.field_Mutation_addComment_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Mutation.NewBug(childComplexity, args["repoRef"].(*string), args["title"].(string), args["message"].(string), args["files"].([]git.Hash)), true
+ return e.complexity.Mutation.AddComment(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["message"].(string), args["files"].([]git.Hash)), true
- case "Mutation.addComment":
- if e.complexity.Mutation.AddComment == nil {
+ case "Mutation.ChangeLabels":
+ if e.complexity.Mutation.ChangeLabels == nil {
break
}
- args, err := field_Mutation_addComment_args(rawArgs)
+ args, err := ec.field_Mutation_changeLabels_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Mutation.AddComment(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["message"].(string), args["files"].([]git.Hash)), true
+ return e.complexity.Mutation.ChangeLabels(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["added"].([]string), args["removed"].([]string)), true
- case "Mutation.changeLabels":
- if e.complexity.Mutation.ChangeLabels == nil {
+ case "Mutation.Close":
+ if e.complexity.Mutation.Close == nil {
break
}
- args, err := field_Mutation_changeLabels_args(rawArgs)
+ args, err := ec.field_Mutation_close_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Mutation.ChangeLabels(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["added"].([]string), args["removed"].([]string)), true
+ return e.complexity.Mutation.Close(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
- case "Mutation.open":
- if e.complexity.Mutation.Open == nil {
+ case "Mutation.Commit":
+ if e.complexity.Mutation.Commit == nil {
break
}
- args, err := field_Mutation_open_args(rawArgs)
+ args, err := ec.field_Mutation_commit_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Mutation.Open(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
+ return e.complexity.Mutation.Commit(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
- case "Mutation.close":
- if e.complexity.Mutation.Close == nil {
+ case "Mutation.NewBug":
+ if e.complexity.Mutation.NewBug == nil {
break
}
- args, err := field_Mutation_close_args(rawArgs)
+ args, err := ec.field_Mutation_newBug_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Mutation.Close(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
+ return e.complexity.Mutation.NewBug(childComplexity, args["repoRef"].(*string), args["title"].(string), args["message"].(string), args["files"].([]git.Hash)), true
- case "Mutation.setTitle":
- if e.complexity.Mutation.SetTitle == nil {
+ case "Mutation.Open":
+ if e.complexity.Mutation.Open == nil {
break
}
- args, err := field_Mutation_setTitle_args(rawArgs)
+ args, err := ec.field_Mutation_open_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Mutation.SetTitle(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["title"].(string)), true
+ return e.complexity.Mutation.Open(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
- case "Mutation.commit":
- if e.complexity.Mutation.Commit == nil {
+ case "Mutation.SetTitle":
+ if e.complexity.Mutation.SetTitle == nil {
break
}
- args, err := field_Mutation_commit_args(rawArgs)
+ args, err := ec.field_Mutation_setTitle_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Mutation.Commit(childComplexity, args["repoRef"].(*string), args["prefix"].(string)), true
+ return e.complexity.Mutation.SetTitle(childComplexity, args["repoRef"].(*string), args["prefix"].(string), args["title"].(string)), true
- case "OperationConnection.edges":
+ case "OperationConnection.Edges":
if e.complexity.OperationConnection.Edges == nil {
break
}
return e.complexity.OperationConnection.Edges(childComplexity), true
- case "OperationConnection.nodes":
+ case "OperationConnection.Nodes":
if e.complexity.OperationConnection.Nodes == nil {
break
}
return e.complexity.OperationConnection.Nodes(childComplexity), true
- case "OperationConnection.pageInfo":
+ case "OperationConnection.PageInfo":
if e.complexity.OperationConnection.PageInfo == nil {
break
}
return e.complexity.OperationConnection.PageInfo(childComplexity), true
- case "OperationConnection.totalCount":
+ case "OperationConnection.TotalCount":
if e.complexity.OperationConnection.TotalCount == nil {
break
}
return e.complexity.OperationConnection.TotalCount(childComplexity), true
- case "OperationEdge.cursor":
+ case "OperationEdge.Cursor":
if e.complexity.OperationEdge.Cursor == nil {
break
}
return e.complexity.OperationEdge.Cursor(childComplexity), true
- case "OperationEdge.node":
+ case "OperationEdge.Node":
if e.complexity.OperationEdge.Node == nil {
break
}
return e.complexity.OperationEdge.Node(childComplexity), true
- case "PageInfo.hasNextPage":
+ case "PageInfo.EndCursor":
+ if e.complexity.PageInfo.EndCursor == nil {
+ break
+ }
+
+ return e.complexity.PageInfo.EndCursor(childComplexity), true
+
+ case "PageInfo.HasNextPage":
if e.complexity.PageInfo.HasNextPage == nil {
break
}
return e.complexity.PageInfo.HasNextPage(childComplexity), true
- case "PageInfo.hasPreviousPage":
+ case "PageInfo.HasPreviousPage":
if e.complexity.PageInfo.HasPreviousPage == nil {
break
}
return e.complexity.PageInfo.HasPreviousPage(childComplexity), true
- case "PageInfo.startCursor":
+ case "PageInfo.StartCursor":
if e.complexity.PageInfo.StartCursor == nil {
break
}
return e.complexity.PageInfo.StartCursor(childComplexity), true
- case "PageInfo.endCursor":
- if e.complexity.PageInfo.EndCursor == nil {
- break
- }
-
- return e.complexity.PageInfo.EndCursor(childComplexity), true
-
- case "Query.defaultRepository":
+ case "Query.DefaultRepository":
if e.complexity.Query.DefaultRepository == nil {
break
}
return e.complexity.Query.DefaultRepository(childComplexity), true
- case "Query.repository":
+ case "Query.Repository":
if e.complexity.Query.Repository == nil {
break
}
- args, err := field_Query_repository_args(rawArgs)
+ args, err := ec.field_Query_repository_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Query.Repository(childComplexity, args["id"].(string)), true
- case "Repository.allBugs":
+ case "Repository.AllBugs":
if e.complexity.Repository.AllBugs == nil {
break
}
- args, err := field_Repository_allBugs_args(rawArgs)
+ args, err := ec.field_Repository_allBugs_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Repository.AllBugs(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int), args["query"].(*string)), true
- case "Repository.bug":
- if e.complexity.Repository.Bug == nil {
+ case "Repository.AllIdentities":
+ if e.complexity.Repository.AllIdentities == nil {
break
}
- args, err := field_Repository_bug_args(rawArgs)
+ args, err := ec.field_Repository_allIdentities_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Repository.Bug(childComplexity, args["prefix"].(string)), true
+ return e.complexity.Repository.AllIdentities(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
- case "Repository.allIdentities":
- if e.complexity.Repository.AllIdentities == nil {
+ case "Repository.Bug":
+ if e.complexity.Repository.Bug == nil {
break
}
- args, err := field_Repository_allIdentities_args(rawArgs)
+ args, err := ec.field_Repository_bug_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
- return e.complexity.Repository.AllIdentities(childComplexity, args["after"].(*string), args["before"].(*string), args["first"].(*int), args["last"].(*int)), true
+ return e.complexity.Repository.Bug(childComplexity, args["prefix"].(string)), true
- case "Repository.identity":
+ case "Repository.Identity":
if e.complexity.Repository.Identity == nil {
break
}
- args, err := field_Repository_identity_args(rawArgs)
+ args, err := ec.field_Repository_identity_args(context.TODO(), rawArgs)
if err != nil {
return 0, false
}
return e.complexity.Repository.Identity(childComplexity, args["prefix"].(string)), true
- case "Repository.userIdentity":
+ case "Repository.UserIdentity":
if e.complexity.Repository.UserIdentity == nil {
break
}
return e.complexity.Repository.UserIdentity(childComplexity), true
- case "SetStatusOperation.hash":
- if e.complexity.SetStatusOperation.Hash == nil {
- break
- }
-
- return e.complexity.SetStatusOperation.Hash(childComplexity), true
-
- case "SetStatusOperation.author":
+ case "SetStatusOperation.Author":
if e.complexity.SetStatusOperation.Author == nil {
break
}
return e.complexity.SetStatusOperation.Author(childComplexity), true
- case "SetStatusOperation.date":
+ case "SetStatusOperation.Date":
if e.complexity.SetStatusOperation.Date == nil {
break
}
return e.complexity.SetStatusOperation.Date(childComplexity), true
- case "SetStatusOperation.status":
- if e.complexity.SetStatusOperation.Status == nil {
+ case "SetStatusOperation.Hash":
+ if e.complexity.SetStatusOperation.Hash == nil {
break
}
- return e.complexity.SetStatusOperation.Status(childComplexity), true
+ return e.complexity.SetStatusOperation.Hash(childComplexity), true
- case "SetStatusTimelineItem.hash":
- if e.complexity.SetStatusTimelineItem.Hash == nil {
+ case "SetStatusOperation.Status":
+ if e.complexity.SetStatusOperation.Status == nil {
break
}
- return e.complexity.SetStatusTimelineItem.Hash(childComplexity), true
+ return e.complexity.SetStatusOperation.Status(childComplexity), true
- case "SetStatusTimelineItem.author":
+ case "SetStatusTimelineItem.Author":
if e.complexity.SetStatusTimelineItem.Author == nil {
break
}
return e.complexity.SetStatusTimelineItem.Author(childComplexity), true
- case "SetStatusTimelineItem.date":
+ case "SetStatusTimelineItem.Date":
if e.complexity.SetStatusTimelineItem.Date == nil {
break
}
return e.complexity.SetStatusTimelineItem.Date(childComplexity), true
- case "SetStatusTimelineItem.status":
- if e.complexity.SetStatusTimelineItem.Status == nil {
+ case "SetStatusTimelineItem.Hash":
+ if e.complexity.SetStatusTimelineItem.Hash == nil {
break
}
- return e.complexity.SetStatusTimelineItem.Status(childComplexity), true
+ return e.complexity.SetStatusTimelineItem.Hash(childComplexity), true
- case "SetTitleOperation.hash":
- if e.complexity.SetTitleOperation.Hash == nil {
+ case "SetStatusTimelineItem.Status":
+ if e.complexity.SetStatusTimelineItem.Status == nil {
break
}
- return e.complexity.SetTitleOperation.Hash(childComplexity), true
+ return e.complexity.SetStatusTimelineItem.Status(childComplexity), true
- case "SetTitleOperation.author":
+ case "SetTitleOperation.Author":
if e.complexity.SetTitleOperation.Author == nil {
break
}
return e.complexity.SetTitleOperation.Author(childComplexity), true
- case "SetTitleOperation.date":
+ case "SetTitleOperation.Date":
if e.complexity.SetTitleOperation.Date == nil {
break
}
return e.complexity.SetTitleOperation.Date(childComplexity), true
- case "SetTitleOperation.title":
- if e.complexity.SetTitleOperation.Title == nil {
+ case "SetTitleOperation.Hash":
+ if e.complexity.SetTitleOperation.Hash == nil {
break
}
- return e.complexity.SetTitleOperation.Title(childComplexity), true
+ return e.complexity.SetTitleOperation.Hash(childComplexity), true
- case "SetTitleOperation.was":
- if e.complexity.SetTitleOperation.Was == nil {
+ case "SetTitleOperation.Title":
+ if e.complexity.SetTitleOperation.Title == nil {
break
}
- return e.complexity.SetTitleOperation.Was(childComplexity), true
+ return e.complexity.SetTitleOperation.Title(childComplexity), true
- case "SetTitleTimelineItem.hash":
- if e.complexity.SetTitleTimelineItem.Hash == nil {
+ case "SetTitleOperation.Was":
+ if e.complexity.SetTitleOperation.Was == nil {
break
}
- return e.complexity.SetTitleTimelineItem.Hash(childComplexity), true
+ return e.complexity.SetTitleOperation.Was(childComplexity), true
- case "SetTitleTimelineItem.author":
+ case "SetTitleTimelineItem.Author":
if e.complexity.SetTitleTimelineItem.Author == nil {
break
}
return e.complexity.SetTitleTimelineItem.Author(childComplexity), true
- case "SetTitleTimelineItem.date":
+ case "SetTitleTimelineItem.Date":
if e.complexity.SetTitleTimelineItem.Date == nil {
break
}
return e.complexity.SetTitleTimelineItem.Date(childComplexity), true
- case "SetTitleTimelineItem.title":
+ case "SetTitleTimelineItem.Hash":
+ if e.complexity.SetTitleTimelineItem.Hash == nil {
+ break
+ }
+
+ return e.complexity.SetTitleTimelineItem.Hash(childComplexity), true
+
+ case "SetTitleTimelineItem.Title":
if e.complexity.SetTitleTimelineItem.Title == nil {
break
}
return e.complexity.SetTitleTimelineItem.Title(childComplexity), true
- case "SetTitleTimelineItem.was":
+ case "SetTitleTimelineItem.Was":
if e.complexity.SetTitleTimelineItem.Was == nil {
break
}
return e.complexity.SetTitleTimelineItem.Was(childComplexity), true
- case "TimelineItemConnection.edges":
+ case "TimelineItemConnection.Edges":
if e.complexity.TimelineItemConnection.Edges == nil {
break
}
return e.complexity.TimelineItemConnection.Edges(childComplexity), true
- case "TimelineItemConnection.nodes":
+ case "TimelineItemConnection.Nodes":
if e.complexity.TimelineItemConnection.Nodes == nil {
break
}
return e.complexity.TimelineItemConnection.Nodes(childComplexity), true
- case "TimelineItemConnection.pageInfo":
+ case "TimelineItemConnection.PageInfo":
if e.complexity.TimelineItemConnection.PageInfo == nil {
break
}
return e.complexity.TimelineItemConnection.PageInfo(childComplexity), true
- case "TimelineItemConnection.totalCount":
+ case "TimelineItemConnection.TotalCount":
if e.complexity.TimelineItemConnection.TotalCount == nil {
break
}
return e.complexity.TimelineItemConnection.TotalCount(childComplexity), true
- case "TimelineItemEdge.cursor":
+ case "TimelineItemEdge.Cursor":
if e.complexity.TimelineItemEdge.Cursor == nil {
break
}
return e.complexity.TimelineItemEdge.Cursor(childComplexity), true
- case "TimelineItemEdge.node":
+ case "TimelineItemEdge.Node":
if e.complexity.TimelineItemEdge.Node == nil {
break
}
@@ -2288,7 +1444,8 @@ func (e *executableSchema) Query(ctx context.Context, op *ast.OperationDefinitio
return &graphql.Response{
Data: buf,
Errors: ec.Errors,
- Extensions: ec.Extensions}
+ Extensions: ec.Extensions,
+ }
}
func (e *executableSchema) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
@@ -2317,69 +1474,1035 @@ type executionContext struct {
*executableSchema
}
-var addCommentOperationImplementors = []string{"AddCommentOperation", "Operation", "Authored"}
+func (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}, next graphql.Resolver) (ret interface{}) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ res, err := ec.ResolverMiddleware(ctx, next)
+ if err != nil {
+ ec.Error(ctx, err)
+ return nil
+ }
+ return res
+}
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _AddCommentOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.AddCommentOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, addCommentOperationImplementors)
+func (ec *executionContext) introspectSchema() (*introspection.Schema, error) {
+ if ec.DisableIntrospection {
+ return nil, errors.New("introspection disabled")
+ }
+ return introspection.WrapSchema(parsedSchema), nil
+}
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
+func (ec *executionContext) introspectType(name string) (*introspection.Type, error) {
+ if ec.DisableIntrospection {
+ return nil, errors.New("introspection disabled")
+ }
+ return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil
+}
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("AddCommentOperation")
- case "hash":
- out.Values[i] = ec._AddCommentOperation_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._AddCommentOperation_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._AddCommentOperation_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "message":
- out.Values[i] = ec._AddCommentOperation_message(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "files":
- out.Values[i] = ec._AddCommentOperation_files(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
+var parsedSchema = gqlparser.MustLoadSchema(
+ &ast.Source{Name: "schema/bug.graphql", Input: `"""Represents a comment on a bug."""
+type Comment implements Authored {
+ """The author of this comment."""
+ author: Identity!
+
+ """The message of this comment."""
+ message: String!
+
+ """All media's hash referenced in this comment"""
+ files: [Hash!]!
+}
+
+type CommentConnection {
+ edges: [CommentEdge!]!
+ nodes: [Comment!]!
+ pageInfo: PageInfo!
+ totalCount: Int!
+}
+
+type CommentEdge {
+ cursor: String!
+ node: Comment!
+}
+
+enum Status {
+ OPEN
+ CLOSED
+}
+
+type Bug {
+ """The identifier for this bug"""
+ id: String!
+ """The human version (truncated) identifier for this bug"""
+ humanId: String!
+ status: Status!
+ title: String!
+ labels: [Label!]!
+ author: Identity!
+ createdAt: Time!
+ lastEdit: Time!
+
+ """The actors of the bug. Actors are Identity that have interacted with the bug."""
+ actors(
+ """Returns the elements in the list that come after the specified cursor."""
+ after: String
+ """Returns the elements in the list that come before the specified cursor."""
+ before: String
+ """Returns the first _n_ elements from the list."""
+ first: Int
+ """Returns the last _n_ elements from the list."""
+ last: Int
+ ): IdentityConnection!
+
+ """The participants of the bug. Participants are Identity that have created or
+ added a comment on the bug."""
+ participants(
+ """Returns the elements in the list that come after the specified cursor."""
+ after: String
+ """Returns the elements in the list that come before the specified cursor."""
+ before: String
+ """Returns the first _n_ elements from the list."""
+ first: Int
+ """Returns the last _n_ elements from the list."""
+ last: Int
+ ): IdentityConnection!
+
+ comments(
+ """Returns the elements in the list that come after the specified cursor."""
+ after: String
+ """Returns the elements in the list that come before the specified cursor."""
+ before: String
+ """Returns the first _n_ elements from the list."""
+ first: Int
+ """Returns the last _n_ elements from the list."""
+ last: Int
+ ): CommentConnection!
+
+ timeline(
+ """Returns the elements in the list that come after the specified cursor."""
+ after: String
+ """Returns the elements in the list that come before the specified cursor."""
+ before: String
+ """Returns the first _n_ elements from the list."""
+ first: Int
+ """Returns the last _n_ elements from the list."""
+ last: Int
+ ): TimelineItemConnection!
+
+ operations(
+ """Returns the elements in the list that come after the specified cursor."""
+ after: String
+ """Returns the elements in the list that come before the specified cursor."""
+ before: String
+ """Returns the first _n_ elements from the list."""
+ first: Int
+ """Returns the last _n_ elements from the list."""
+ last: Int
+ ): OperationConnection!
+}
+
+"""The connection type for Bug."""
+type BugConnection {
+ """A list of edges."""
+ edges: [BugEdge!]!
+ nodes: [Bug!]!
+ """Information to aid in pagination."""
+ pageInfo: PageInfo!
+ """Identifies the total count of items in the connection."""
+ totalCount: Int!
+}
+
+"""An edge in a connection."""
+type BugEdge {
+ """A cursor for use in pagination."""
+ cursor: String!
+ """The item at the end of the edge."""
+ node: Bug!
+}
+
+`},
+ &ast.Source{Name: "schema/identity.graphql", Input: `"""Represents an identity"""
+type Identity {
+ """The identifier for this identity"""
+ id: String!
+ """The human version (truncated) identifier for this identity"""
+ humanId: String!
+ """The name of the person, if known."""
+ name: String
+ """The email of the person, if known."""
+ email: String
+ """The login of the person, if known."""
+ login: String
+ """A string containing the either the name of the person, its login or both"""
+ displayName: String!
+ """An url to an avatar"""
+ avatarUrl: String
+ """isProtected is true if the chain of git commits started to be signed.
+ If that's the case, only signed commits with a valid key for this identity can be added."""
+ isProtected: Boolean!
+}
+
+type IdentityConnection {
+ edges: [IdentityEdge!]!
+ nodes: [Identity!]!
+ pageInfo: PageInfo!
+ totalCount: Int!
+}
+
+type IdentityEdge {
+ cursor: String!
+ node: Identity!
+}`},
+ &ast.Source{Name: "schema/operations.graphql", Input: `"""An operation applied to a bug."""
+interface Operation {
+ """The hash of the operation"""
+ hash: Hash!
+ """The operations author."""
+ author: Identity!
+ """The datetime when this operation was issued."""
+ date: Time!
+}
+
+# Connection
+
+"""The connection type for an Operation"""
+type OperationConnection {
+ edges: [OperationEdge!]!
+ nodes: [Operation!]!
+ pageInfo: PageInfo!
+ totalCount: Int!
+}
+
+"""Represent an Operation"""
+type OperationEdge {
+ cursor: String!
+ node: Operation!
+}
+
+# Operations
+
+type CreateOperation implements Operation & Authored {
+ """The hash of the operation"""
+ hash: Hash!
+ """The author of this object."""
+ author: Identity!
+ """The datetime when this operation was issued."""
+ date: Time!
+
+ title: String!
+ message: String!
+ files: [Hash!]!
+}
+
+type SetTitleOperation implements Operation & Authored {
+ """The hash of the operation"""
+ hash: Hash!
+ """The author of this object."""
+ author: Identity!
+ """The datetime when this operation was issued."""
+ date: Time!
+
+ title: String!
+ was: String!
+}
+
+type AddCommentOperation implements Operation & Authored {
+ """The hash of the operation"""
+ hash: Hash!
+ """The author of this object."""
+ author: Identity!
+ """The datetime when this operation was issued."""
+ date: Time!
+
+ message: String!
+ files: [Hash!]!
+}
+
+type EditCommentOperation implements Operation & Authored {
+ """The hash of the operation"""
+ hash: Hash!
+ """The author of this object."""
+ author: Identity!
+ """The datetime when this operation was issued."""
+ date: Time!
+
+ target: Hash!
+ message: String!
+ files: [Hash!]!
+}
+
+type SetStatusOperation implements Operation & Authored {
+ """The hash of the operation"""
+ hash: Hash!
+ """The author of this object."""
+ author: Identity!
+ """The datetime when this operation was issued."""
+ date: Time!
+
+ status: Status!
+}
+
+type LabelChangeOperation implements Operation & Authored {
+ """The hash of the operation"""
+ hash: Hash!
+ """The author of this object."""
+ author: Identity!
+ """The datetime when this operation was issued."""
+ date: Time!
+
+ added: [Label!]!
+ removed: [Label!]!
+}
+`},
+ &ast.Source{Name: "schema/repository.graphql", Input: `
+type Repository {
+ """All the bugs"""
+ allBugs(
+ """Returns the elements in the list that come after the specified cursor."""
+ after: String
+ """Returns the elements in the list that come before the specified cursor."""
+ before: String
+ """Returns the first _n_ elements from the list."""
+ first: Int
+ """Returns the last _n_ elements from the list."""
+ last: Int
+ """A query to select and order bugs"""
+ query: String
+ ): BugConnection!
+
+ bug(prefix: String!): Bug
+
+ """All the identities"""
+ allIdentities(
+ """Returns the elements in the list that come after the specified cursor."""
+ after: String
+ """Returns the elements in the list that come before the specified cursor."""
+ before: String
+ """Returns the first _n_ elements from the list."""
+ first: Int
+ """Returns the last _n_ elements from the list."""
+ last: Int
+ ): IdentityConnection!
+
+ identity(prefix: String!): Identity
+
+ """The identity created or selected by the user as its own"""
+ userIdentity:Identity
+}`},
+ &ast.Source{Name: "schema/root.graphql", Input: `type Query {
+ defaultRepository: Repository
+ repository(id: String!): Repository
+}
+
+type Mutation {
+ newBug(repoRef: String, title: String!, message: String!, files: [Hash!]): Bug!
+
+ addComment(repoRef: String, prefix: String!, message: String!, files: [Hash!]): Bug!
+ changeLabels(repoRef: String, prefix: String!, added: [String!], removed: [String!]): Bug!
+ open(repoRef: String, prefix: String!): Bug!
+ close(repoRef: String, prefix: String!): Bug!
+ setTitle(repoRef: String, prefix: String!, title: String!): Bug!
+
+ commit(repoRef: String, prefix: String!): Bug!
+}
+`},
+ &ast.Source{Name: "schema/timeline.graphql", Input: `"""An item in the timeline of events"""
+interface TimelineItem {
+ """The hash of the source operation"""
+ hash: Hash!
+}
+
+"""CommentHistoryStep hold one version of a message in the history"""
+type CommentHistoryStep {
+ message: String!
+ date: Time!
+}
+
+# Connection
+
+"""The connection type for TimelineItem"""
+type TimelineItemConnection {
+ edges: [TimelineItemEdge!]!
+ nodes: [TimelineItem!]!
+ pageInfo: PageInfo!
+ totalCount: Int!
+}
+
+"""Represent a TimelineItem"""
+type TimelineItemEdge {
+ cursor: String!
+ node: TimelineItem!
+}
+
+# Items
+
+"""CreateTimelineItem is a TimelineItem that represent the creation of a bug and its message edition history"""
+type CreateTimelineItem implements TimelineItem {
+ """The hash of the source operation"""
+ hash: Hash!
+ author: Identity!
+ message: String!
+ messageIsEmpty: Boolean!
+ files: [Hash!]!
+ createdAt: Time!
+ lastEdit: Time!
+ edited: Boolean!
+ history: [CommentHistoryStep!]!
+}
+
+"""AddCommentTimelineItem is a TimelineItem that represent a Comment and its edition history"""
+type AddCommentTimelineItem implements TimelineItem {
+ """The hash of the source operation"""
+ hash: Hash!
+ author: Identity!
+ message: String!
+ messageIsEmpty: Boolean!
+ files: [Hash!]!
+ createdAt: Time!
+ lastEdit: Time!
+ edited: Boolean!
+ history: [CommentHistoryStep!]!
+}
+
+"""LabelChangeTimelineItem is a TimelineItem that represent a change in the labels of a bug"""
+type LabelChangeTimelineItem implements TimelineItem {
+ """The hash of the source operation"""
+ hash: Hash!
+ author: Identity!
+ date: Time!
+ added: [Label!]!
+ removed: [Label!]!
+}
+
+"""SetStatusTimelineItem is a TimelineItem that represent a change in the status of a bug"""
+type SetStatusTimelineItem implements TimelineItem {
+ """The hash of the source operation"""
+ hash: Hash!
+ author: Identity!
+ date: Time!
+ status: Status!
+}
+
+"""LabelChangeTimelineItem is a TimelineItem that represent a change in the title of a bug"""
+type SetTitleTimelineItem implements TimelineItem {
+ """The hash of the source operation"""
+ hash: Hash!
+ author: Identity!
+ date: Time!
+ title: String!
+ was: String!
+}
+`},
+ &ast.Source{Name: "schema/types.graphql", Input: `scalar Time
+scalar Label
+scalar Hash
+
+"""Information about pagination in a connection."""
+type PageInfo {
+ """When paginating forwards, are there more items?"""
+ hasNextPage: Boolean!
+ """When paginating backwards, are there more items?"""
+ hasPreviousPage: Boolean!
+ """When paginating backwards, the cursor to continue."""
+ startCursor: String!
+ """When paginating forwards, the cursor to continue."""
+ endCursor: String!
+}
+
+"""An object that has an author."""
+interface Authored {
+ """The author of this object."""
+ author: Identity!
+}`},
+)
+
+// endregion ************************** generated!.gotpl **************************
+
+// region ***************************** args.gotpl *****************************
+
+func (ec *executionContext) field_Bug_actors_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
}
}
- wg.Wait()
- if invalid {
- return graphql.Null
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ arg1, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
}
- return out
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ arg2, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ return args, nil
+}
+
+func (ec *executionContext) field_Bug_comments_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ arg1, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ arg2, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ return args, nil
+}
+
+func (ec *executionContext) field_Bug_operations_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ arg1, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ arg2, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ return args, nil
+}
+
+func (ec *executionContext) field_Bug_participants_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ arg1, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ arg2, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ return args, nil
}
-// nolint: vetshadow
+func (ec *executionContext) field_Bug_timeline_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ arg1, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ arg2, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ return args, nil
+}
+
+func (ec *executionContext) field_Mutation_addComment_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ arg1, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ var arg2 string
+ if tmp, ok := rawArgs["message"]; ok {
+ arg2, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["message"] = arg2
+ var arg3 []git.Hash
+ if tmp, ok := rawArgs["files"]; ok {
+ arg3, err = ec.unmarshalOHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["files"] = arg3
+ return args, nil
+}
+
+func (ec *executionContext) field_Mutation_changeLabels_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ arg1, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ var arg2 []string
+ if tmp, ok := rawArgs["added"]; ok {
+ arg2, err = ec.unmarshalOString2ᚕstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["added"] = arg2
+ var arg3 []string
+ if tmp, ok := rawArgs["removed"]; ok {
+ arg3, err = ec.unmarshalOString2ᚕstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["removed"] = arg3
+ return args, nil
+}
+
+func (ec *executionContext) field_Mutation_close_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ arg1, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ return args, nil
+}
+
+func (ec *executionContext) field_Mutation_commit_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ arg1, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ return args, nil
+}
+
+func (ec *executionContext) field_Mutation_newBug_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["title"]; ok {
+ arg1, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["title"] = arg1
+ var arg2 string
+ if tmp, ok := rawArgs["message"]; ok {
+ arg2, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["message"] = arg2
+ var arg3 []git.Hash
+ if tmp, ok := rawArgs["files"]; ok {
+ arg3, err = ec.unmarshalOHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["files"] = arg3
+ return args, nil
+}
+
+func (ec *executionContext) field_Mutation_open_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ arg1, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ return args, nil
+}
+
+func (ec *executionContext) field_Mutation_setTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["repoRef"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["repoRef"] = arg0
+ var arg1 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ arg1, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg1
+ var arg2 string
+ if tmp, ok := rawArgs["title"]; ok {
+ arg2, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["title"] = arg2
+ return args, nil
+}
+
+func (ec *executionContext) field_Query___type_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 string
+ if tmp, ok := rawArgs["name"]; ok {
+ arg0, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["name"] = arg0
+ return args, nil
+}
+
+func (ec *executionContext) field_Query_repository_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 string
+ if tmp, ok := rawArgs["id"]; ok {
+ arg0, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["id"] = arg0
+ return args, nil
+}
+
+func (ec *executionContext) field_Repository_allBugs_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ arg1, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ arg2, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ var arg4 *string
+ if tmp, ok := rawArgs["query"]; ok {
+ arg4, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["query"] = arg4
+ return args, nil
+}
+
+func (ec *executionContext) field_Repository_allIdentities_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 *string
+ if tmp, ok := rawArgs["after"]; ok {
+ arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["after"] = arg0
+ var arg1 *string
+ if tmp, ok := rawArgs["before"]; ok {
+ arg1, err = ec.unmarshalOString2ᚖstring(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["before"] = arg1
+ var arg2 *int
+ if tmp, ok := rawArgs["first"]; ok {
+ arg2, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["first"] = arg2
+ var arg3 *int
+ if tmp, ok := rawArgs["last"]; ok {
+ arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["last"] = arg3
+ return args, nil
+}
+
+func (ec *executionContext) field_Repository_bug_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ arg0, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg0
+ return args, nil
+}
+
+func (ec *executionContext) field_Repository_identity_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 string
+ if tmp, ok := rawArgs["prefix"]; ok {
+ arg0, err = ec.unmarshalNString2string(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["prefix"] = arg0
+ return args, nil
+}
+
+func (ec *executionContext) field___Type_enumValues_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 bool
+ if tmp, ok := rawArgs["includeDeprecated"]; ok {
+ arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["includeDeprecated"] = arg0
+ return args, nil
+}
+
+func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ var arg0 bool
+ if tmp, ok := rawArgs["includeDeprecated"]; ok {
+ arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ }
+ args["includeDeprecated"] = arg0
+ return args, nil
+}
+
+// endregion ***************************** args.gotpl *****************************
+
+// region **************************** field.gotpl *****************************
+
func (ec *executionContext) _AddCommentOperation_hash(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentOperation",
- Args: nil,
- Field: field,
+ Object: "AddCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2396,17 +2519,17 @@ func (ec *executionContext) _AddCommentOperation_hash(ctx context.Context, field
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentOperation_author(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentOperation",
- Args: nil,
- Field: field,
+ Object: "AddCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2423,18 +2546,17 @@ func (ec *executionContext) _AddCommentOperation_author(ctx context.Context, fie
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentOperation_date(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentOperation",
- Args: nil,
- Field: field,
+ Object: "AddCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2448,20 +2570,20 @@ func (ec *executionContext) _AddCommentOperation_date(ctx context.Context, field
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentOperation_message(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentOperation",
- Args: nil,
- Field: field,
+ Object: "AddCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2478,17 +2600,17 @@ func (ec *executionContext) _AddCommentOperation_message(ctx context.Context, fi
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentOperation_files(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentOperation",
- Args: nil,
- Field: field,
+ Object: "AddCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2505,105 +2627,17 @@ func (ec *executionContext) _AddCommentOperation_files(ctx context.Context, fiel
res := resTmp.([]git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-var addCommentTimelineItemImplementors = []string{"AddCommentTimelineItem", "TimelineItem"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _AddCommentTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, addCommentTimelineItemImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("AddCommentTimelineItem")
- case "hash":
- out.Values[i] = ec._AddCommentTimelineItem_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._AddCommentTimelineItem_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "message":
- out.Values[i] = ec._AddCommentTimelineItem_message(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "messageIsEmpty":
- out.Values[i] = ec._AddCommentTimelineItem_messageIsEmpty(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "files":
- out.Values[i] = ec._AddCommentTimelineItem_files(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "createdAt":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._AddCommentTimelineItem_createdAt(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "lastEdit":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._AddCommentTimelineItem_lastEdit(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "edited":
- out.Values[i] = ec._AddCommentTimelineItem_edited(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "history":
- out.Values[i] = ec._AddCommentTimelineItem_history(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_hash(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2620,17 +2654,17 @@ func (ec *executionContext) _AddCommentTimelineItem_hash(ctx context.Context, fi
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_author(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2647,18 +2681,17 @@ func (ec *executionContext) _AddCommentTimelineItem_author(ctx context.Context,
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_message(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2675,17 +2708,17 @@ func (ec *executionContext) _AddCommentTimelineItem_message(ctx context.Context,
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_messageIsEmpty(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2702,17 +2735,17 @@ func (ec *executionContext) _AddCommentTimelineItem_messageIsEmpty(ctx context.C
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_files(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2729,26 +2762,17 @@ func (ec *executionContext) _AddCommentTimelineItem_files(ctx context.Context, f
res := resTmp.([]git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_createdAt(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2762,20 +2786,20 @@ func (ec *executionContext) _AddCommentTimelineItem_createdAt(ctx context.Contex
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_lastEdit(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2789,20 +2813,20 @@ func (ec *executionContext) _AddCommentTimelineItem_lastEdit(ctx context.Context
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_edited(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2819,17 +2843,17 @@ func (ec *executionContext) _AddCommentTimelineItem_edited(ctx context.Context,
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _AddCommentTimelineItem_history(ctx context.Context, field graphql.CollectedField, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "AddCommentTimelineItem",
- Args: nil,
- Field: field,
+ Object: "AddCommentTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -2846,169 +2870,17 @@ func (ec *executionContext) _AddCommentTimelineItem_history(ctx context.Context,
res := resTmp.([]bug.CommentHistoryStep)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._CommentHistoryStep(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNCommentHistoryStep2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐCommentHistoryStep(ctx, field.Selections, res)
}
-var bugImplementors = []string{"Bug"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Bug(ctx context.Context, sel ast.SelectionSet, obj *bug.Snapshot) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, bugImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("Bug")
- case "id":
- out.Values[i] = ec._Bug_id(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "humanId":
- out.Values[i] = ec._Bug_humanId(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "status":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Bug_status(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "title":
- out.Values[i] = ec._Bug_title(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "labels":
- out.Values[i] = ec._Bug_labels(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._Bug_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "createdAt":
- out.Values[i] = ec._Bug_createdAt(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "lastEdit":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Bug_lastEdit(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "actors":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Bug_actors(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "participants":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Bug_participants(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "comments":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Bug_comments(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "timeline":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Bug_timeline(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "operations":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Bug_operations(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _Bug_id(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: nil,
- Field: field,
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3025,17 +2897,17 @@ func (ec *executionContext) _Bug_id(ctx context.Context, field graphql.Collected
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_humanId(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: nil,
- Field: field,
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3052,17 +2924,17 @@ func (ec *executionContext) _Bug_humanId(ctx context.Context, field graphql.Coll
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_status(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: nil,
- Field: field,
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3079,17 +2951,17 @@ func (ec *executionContext) _Bug_status(ctx context.Context, field graphql.Colle
res := resTmp.(models.Status)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNStatus2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐStatus(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_title(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: nil,
- Field: field,
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3106,17 +2978,17 @@ func (ec *executionContext) _Bug_title(ctx context.Context, field graphql.Collec
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_labels(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: nil,
- Field: field,
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3133,26 +3005,17 @@ func (ec *executionContext) _Bug_labels(ctx context.Context, field graphql.Colle
res := resTmp.([]bug.Label)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNLabel2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_author(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: nil,
- Field: field,
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3169,18 +3032,17 @@ func (ec *executionContext) _Bug_author(ctx context.Context, field graphql.Colle
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_createdAt(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: nil,
- Field: field,
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3197,17 +3059,17 @@ func (ec *executionContext) _Bug_createdAt(ctx context.Context, field graphql.Co
res := resTmp.(time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2timeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_lastEdit(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: nil,
- Field: field,
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3221,28 +3083,29 @@ func (ec *executionContext) _Bug_lastEdit(ctx context.Context, field graphql.Col
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_actors(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Bug_actors_args(rawArgs)
+ args, err := ec.field_Bug_actors_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -3254,29 +3117,29 @@ func (ec *executionContext) _Bug_actors(ctx context.Context, field graphql.Colle
}
return graphql.Null
}
- res := resTmp.(models.IdentityConnection)
+ res := resTmp.(*models.IdentityConnection)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._IdentityConnection(ctx, field.Selections, &res)
+ return ec.marshalNIdentityConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityConnection(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_participants(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Bug_participants_args(rawArgs)
+ args, err := ec.field_Bug_participants_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -3288,29 +3151,29 @@ func (ec *executionContext) _Bug_participants(ctx context.Context, field graphql
}
return graphql.Null
}
- res := resTmp.(models.IdentityConnection)
+ res := resTmp.(*models.IdentityConnection)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._IdentityConnection(ctx, field.Selections, &res)
+ return ec.marshalNIdentityConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityConnection(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_comments(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Bug_comments_args(rawArgs)
+ args, err := ec.field_Bug_comments_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -3322,29 +3185,29 @@ func (ec *executionContext) _Bug_comments(ctx context.Context, field graphql.Col
}
return graphql.Null
}
- res := resTmp.(models.CommentConnection)
+ res := resTmp.(*models.CommentConnection)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._CommentConnection(ctx, field.Selections, &res)
+ return ec.marshalNCommentConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐCommentConnection(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_timeline(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Bug_timeline_args(rawArgs)
+ args, err := ec.field_Bug_timeline_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -3356,29 +3219,29 @@ func (ec *executionContext) _Bug_timeline(ctx context.Context, field graphql.Col
}
return graphql.Null
}
- res := resTmp.(models.TimelineItemConnection)
+ res := resTmp.(*models.TimelineItemConnection)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._TimelineItemConnection(ctx, field.Selections, &res)
+ return ec.marshalNTimelineItemConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐTimelineItemConnection(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Bug_operations(ctx context.Context, field graphql.CollectedField, obj *bug.Snapshot) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Bug",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Bug_operations_args(rawArgs)
+ args, err := ec.field_Bug_operations_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Bug",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -3390,66 +3253,20 @@ func (ec *executionContext) _Bug_operations(ctx context.Context, field graphql.C
}
return graphql.Null
}
- res := resTmp.(models.OperationConnection)
+ res := resTmp.(*models.OperationConnection)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._OperationConnection(ctx, field.Selections, &res)
+ return ec.marshalNOperationConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐOperationConnection(ctx, field.Selections, res)
}
-var bugConnectionImplementors = []string{"BugConnection"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _BugConnection(ctx context.Context, sel ast.SelectionSet, obj *models.BugConnection) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, bugConnectionImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("BugConnection")
- case "edges":
- out.Values[i] = ec._BugConnection_edges(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "nodes":
- out.Values[i] = ec._BugConnection_nodes(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "pageInfo":
- out.Values[i] = ec._BugConnection_pageInfo(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "totalCount":
- out.Values[i] = ec._BugConnection_totalCount(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _BugConnection_edges(ctx context.Context, field graphql.CollectedField, obj *models.BugConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "BugConnection",
- Args: nil,
- Field: field,
+ Object: "BugConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3466,50 +3283,17 @@ func (ec *executionContext) _BugConnection_edges(ctx context.Context, field grap
res := resTmp.([]models.BugEdge)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._BugEdge(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNBugEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐBugEdge(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _BugConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *models.BugConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "BugConnection",
- Args: nil,
- Field: field,
+ Object: "BugConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3526,50 +3310,17 @@ func (ec *executionContext) _BugConnection_nodes(ctx context.Context, field grap
res := resTmp.([]bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._Bug(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNBug2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _BugConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *models.BugConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "BugConnection",
- Args: nil,
- Field: field,
+ Object: "BugConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3586,18 +3337,17 @@ func (ec *executionContext) _BugConnection_pageInfo(ctx context.Context, field g
res := resTmp.(models.PageInfo)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._PageInfo(ctx, field.Selections, &res)
+ return ec.marshalNPageInfo2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐPageInfo(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _BugConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *models.BugConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "BugConnection",
- Args: nil,
- Field: field,
+ Object: "BugConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3614,52 +3364,17 @@ func (ec *executionContext) _BugConnection_totalCount(ctx context.Context, field
res := resTmp.(int)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalInt(res)
+ return ec.marshalNInt2int(ctx, field.Selections, res)
}
-var bugEdgeImplementors = []string{"BugEdge"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _BugEdge(ctx context.Context, sel ast.SelectionSet, obj *models.BugEdge) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, bugEdgeImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("BugEdge")
- case "cursor":
- out.Values[i] = ec._BugEdge_cursor(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "node":
- out.Values[i] = ec._BugEdge_node(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _BugEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *models.BugEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "BugEdge",
- Args: nil,
- Field: field,
+ Object: "BugEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3676,17 +3391,17 @@ func (ec *executionContext) _BugEdge_cursor(ctx context.Context, field graphql.C
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _BugEdge_node(ctx context.Context, field graphql.CollectedField, obj *models.BugEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "BugEdge",
- Args: nil,
- Field: field,
+ Object: "BugEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3703,58 +3418,17 @@ func (ec *executionContext) _BugEdge_node(ctx context.Context, field graphql.Col
res := resTmp.(bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Bug(ctx, field.Selections, &res)
+ return ec.marshalNBug2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-var commentImplementors = []string{"Comment", "Authored"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Comment(ctx context.Context, sel ast.SelectionSet, obj *bug.Comment) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, commentImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("Comment")
- case "author":
- out.Values[i] = ec._Comment_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "message":
- out.Values[i] = ec._Comment_message(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "files":
- out.Values[i] = ec._Comment_files(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _Comment_author(ctx context.Context, field graphql.CollectedField, obj *bug.Comment) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Comment",
- Args: nil,
- Field: field,
+ Object: "Comment",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3771,18 +3445,17 @@ func (ec *executionContext) _Comment_author(ctx context.Context, field graphql.C
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Comment_message(ctx context.Context, field graphql.CollectedField, obj *bug.Comment) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Comment",
- Args: nil,
- Field: field,
+ Object: "Comment",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3799,17 +3472,17 @@ func (ec *executionContext) _Comment_message(ctx context.Context, field graphql.
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Comment_files(ctx context.Context, field graphql.CollectedField, obj *bug.Comment) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Comment",
- Args: nil,
- Field: field,
+ Object: "Comment",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3826,71 +3499,17 @@ func (ec *executionContext) _Comment_files(ctx context.Context, field graphql.Co
res := resTmp.([]git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
-}
-
-var commentConnectionImplementors = []string{"CommentConnection"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _CommentConnection(ctx context.Context, sel ast.SelectionSet, obj *models.CommentConnection) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, commentConnectionImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("CommentConnection")
- case "edges":
- out.Values[i] = ec._CommentConnection_edges(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "nodes":
- out.Values[i] = ec._CommentConnection_nodes(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "pageInfo":
- out.Values[i] = ec._CommentConnection_pageInfo(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "totalCount":
- out.Values[i] = ec._CommentConnection_totalCount(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CommentConnection_edges(ctx context.Context, field graphql.CollectedField, obj *models.CommentConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CommentConnection",
- Args: nil,
- Field: field,
+ Object: "CommentConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3907,50 +3526,17 @@ func (ec *executionContext) _CommentConnection_edges(ctx context.Context, field
res := resTmp.([]models.CommentEdge)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._CommentEdge(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNCommentEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐCommentEdge(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CommentConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *models.CommentConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CommentConnection",
- Args: nil,
- Field: field,
+ Object: "CommentConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -3967,50 +3553,17 @@ func (ec *executionContext) _CommentConnection_nodes(ctx context.Context, field
res := resTmp.([]bug.Comment)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._Comment(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNComment2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐComment(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CommentConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *models.CommentConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CommentConnection",
- Args: nil,
- Field: field,
+ Object: "CommentConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4027,18 +3580,17 @@ func (ec *executionContext) _CommentConnection_pageInfo(ctx context.Context, fie
res := resTmp.(models.PageInfo)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._PageInfo(ctx, field.Selections, &res)
+ return ec.marshalNPageInfo2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐPageInfo(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CommentConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *models.CommentConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CommentConnection",
- Args: nil,
- Field: field,
+ Object: "CommentConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4055,52 +3607,17 @@ func (ec *executionContext) _CommentConnection_totalCount(ctx context.Context, f
res := resTmp.(int)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalInt(res)
+ return ec.marshalNInt2int(ctx, field.Selections, res)
}
-var commentEdgeImplementors = []string{"CommentEdge"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _CommentEdge(ctx context.Context, sel ast.SelectionSet, obj *models.CommentEdge) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, commentEdgeImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("CommentEdge")
- case "cursor":
- out.Values[i] = ec._CommentEdge_cursor(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "node":
- out.Values[i] = ec._CommentEdge_node(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _CommentEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *models.CommentEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CommentEdge",
- Args: nil,
- Field: field,
+ Object: "CommentEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4117,17 +3634,17 @@ func (ec *executionContext) _CommentEdge_cursor(ctx context.Context, field graph
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CommentEdge_node(ctx context.Context, field graphql.CollectedField, obj *models.CommentEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CommentEdge",
- Args: nil,
- Field: field,
+ Object: "CommentEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4144,58 +3661,17 @@ func (ec *executionContext) _CommentEdge_node(ctx context.Context, field graphql
res := resTmp.(bug.Comment)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Comment(ctx, field.Selections, &res)
+ return ec.marshalNComment2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐComment(ctx, field.Selections, res)
}
-var commentHistoryStepImplementors = []string{"CommentHistoryStep"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _CommentHistoryStep(ctx context.Context, sel ast.SelectionSet, obj *bug.CommentHistoryStep) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, commentHistoryStepImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("CommentHistoryStep")
- case "message":
- out.Values[i] = ec._CommentHistoryStep_message(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._CommentHistoryStep_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _CommentHistoryStep_message(ctx context.Context, field graphql.CollectedField, obj *bug.CommentHistoryStep) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CommentHistoryStep",
- Args: nil,
- Field: field,
+ Object: "CommentHistoryStep",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4212,17 +3688,17 @@ func (ec *executionContext) _CommentHistoryStep_message(ctx context.Context, fie
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CommentHistoryStep_date(ctx context.Context, field graphql.CollectedField, obj *bug.CommentHistoryStep) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CommentHistoryStep",
- Args: nil,
- Field: field,
+ Object: "CommentHistoryStep",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4236,80 +3712,20 @@ func (ec *executionContext) _CommentHistoryStep_date(ctx context.Context, field
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
-}
-
-var createOperationImplementors = []string{"CreateOperation", "Operation", "Authored"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _CreateOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.CreateOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, createOperationImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("CreateOperation")
- case "hash":
- out.Values[i] = ec._CreateOperation_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._CreateOperation_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._CreateOperation_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "title":
- out.Values[i] = ec._CreateOperation_title(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "message":
- out.Values[i] = ec._CreateOperation_message(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "files":
- out.Values[i] = ec._CreateOperation_files(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateOperation_hash(ctx context.Context, field graphql.CollectedField, obj *bug.CreateOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateOperation",
- Args: nil,
- Field: field,
+ Object: "CreateOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4326,17 +3742,17 @@ func (ec *executionContext) _CreateOperation_hash(ctx context.Context, field gra
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateOperation_author(ctx context.Context, field graphql.CollectedField, obj *bug.CreateOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateOperation",
- Args: nil,
- Field: field,
+ Object: "CreateOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4353,18 +3769,17 @@ func (ec *executionContext) _CreateOperation_author(ctx context.Context, field g
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateOperation_date(ctx context.Context, field graphql.CollectedField, obj *bug.CreateOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateOperation",
- Args: nil,
- Field: field,
+ Object: "CreateOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4378,20 +3793,20 @@ func (ec *executionContext) _CreateOperation_date(ctx context.Context, field gra
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateOperation_title(ctx context.Context, field graphql.CollectedField, obj *bug.CreateOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateOperation",
- Args: nil,
- Field: field,
+ Object: "CreateOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4408,17 +3823,17 @@ func (ec *executionContext) _CreateOperation_title(ctx context.Context, field gr
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateOperation_message(ctx context.Context, field graphql.CollectedField, obj *bug.CreateOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateOperation",
- Args: nil,
- Field: field,
+ Object: "CreateOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4435,17 +3850,17 @@ func (ec *executionContext) _CreateOperation_message(ctx context.Context, field
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateOperation_files(ctx context.Context, field graphql.CollectedField, obj *bug.CreateOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateOperation",
- Args: nil,
- Field: field,
+ Object: "CreateOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4462,105 +3877,17 @@ func (ec *executionContext) _CreateOperation_files(ctx context.Context, field gr
res := resTmp.([]git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-var createTimelineItemImplementors = []string{"CreateTimelineItem", "TimelineItem"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _CreateTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.CreateTimelineItem) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, createTimelineItemImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("CreateTimelineItem")
- case "hash":
- out.Values[i] = ec._CreateTimelineItem_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._CreateTimelineItem_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "message":
- out.Values[i] = ec._CreateTimelineItem_message(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "messageIsEmpty":
- out.Values[i] = ec._CreateTimelineItem_messageIsEmpty(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "files":
- out.Values[i] = ec._CreateTimelineItem_files(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "createdAt":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._CreateTimelineItem_createdAt(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "lastEdit":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._CreateTimelineItem_lastEdit(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "edited":
- out.Values[i] = ec._CreateTimelineItem_edited(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "history":
- out.Values[i] = ec._CreateTimelineItem_history(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_hash(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4577,17 +3904,17 @@ func (ec *executionContext) _CreateTimelineItem_hash(ctx context.Context, field
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_author(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4604,18 +3931,17 @@ func (ec *executionContext) _CreateTimelineItem_author(ctx context.Context, fiel
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_message(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4632,17 +3958,17 @@ func (ec *executionContext) _CreateTimelineItem_message(ctx context.Context, fie
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_messageIsEmpty(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4659,17 +3985,17 @@ func (ec *executionContext) _CreateTimelineItem_messageIsEmpty(ctx context.Conte
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_files(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4686,26 +4012,17 @@ func (ec *executionContext) _CreateTimelineItem_files(ctx context.Context, field
res := resTmp.([]git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_createdAt(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4719,20 +4036,20 @@ func (ec *executionContext) _CreateTimelineItem_createdAt(ctx context.Context, f
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_lastEdit(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4746,20 +4063,20 @@ func (ec *executionContext) _CreateTimelineItem_lastEdit(ctx context.Context, fi
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_edited(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4776,17 +4093,17 @@ func (ec *executionContext) _CreateTimelineItem_edited(ctx context.Context, fiel
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _CreateTimelineItem_history(ctx context.Context, field graphql.CollectedField, obj *bug.CreateTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "CreateTimelineItem",
- Args: nil,
- Field: field,
+ Object: "CreateTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4803,110 +4120,17 @@ func (ec *executionContext) _CreateTimelineItem_history(ctx context.Context, fie
res := resTmp.([]bug.CommentHistoryStep)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._CommentHistoryStep(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNCommentHistoryStep2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐCommentHistoryStep(ctx, field.Selections, res)
}
-var editCommentOperationImplementors = []string{"EditCommentOperation", "Operation", "Authored"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _EditCommentOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.EditCommentOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, editCommentOperationImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("EditCommentOperation")
- case "hash":
- out.Values[i] = ec._EditCommentOperation_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._EditCommentOperation_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._EditCommentOperation_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "target":
- out.Values[i] = ec._EditCommentOperation_target(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "message":
- out.Values[i] = ec._EditCommentOperation_message(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "files":
- out.Values[i] = ec._EditCommentOperation_files(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _EditCommentOperation_hash(ctx context.Context, field graphql.CollectedField, obj *bug.EditCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "EditCommentOperation",
- Args: nil,
- Field: field,
+ Object: "EditCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4923,17 +4147,17 @@ func (ec *executionContext) _EditCommentOperation_hash(ctx context.Context, fiel
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _EditCommentOperation_author(ctx context.Context, field graphql.CollectedField, obj *bug.EditCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "EditCommentOperation",
- Args: nil,
- Field: field,
+ Object: "EditCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4950,18 +4174,17 @@ func (ec *executionContext) _EditCommentOperation_author(ctx context.Context, fi
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _EditCommentOperation_date(ctx context.Context, field graphql.CollectedField, obj *bug.EditCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "EditCommentOperation",
- Args: nil,
- Field: field,
+ Object: "EditCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -4975,20 +4198,20 @@ func (ec *executionContext) _EditCommentOperation_date(ctx context.Context, fiel
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _EditCommentOperation_target(ctx context.Context, field graphql.CollectedField, obj *bug.EditCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "EditCommentOperation",
- Args: nil,
- Field: field,
+ Object: "EditCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5005,17 +4228,17 @@ func (ec *executionContext) _EditCommentOperation_target(ctx context.Context, fi
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _EditCommentOperation_message(ctx context.Context, field graphql.CollectedField, obj *bug.EditCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "EditCommentOperation",
- Args: nil,
- Field: field,
+ Object: "EditCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5032,17 +4255,17 @@ func (ec *executionContext) _EditCommentOperation_message(ctx context.Context, f
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _EditCommentOperation_files(ctx context.Context, field graphql.CollectedField, obj *bug.EditCommentOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "EditCommentOperation",
- Args: nil,
- Field: field,
+ Object: "EditCommentOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5059,112 +4282,17 @@ func (ec *executionContext) _EditCommentOperation_files(ctx context.Context, fie
res := resTmp.([]git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-var identityImplementors = []string{"Identity"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Identity(ctx context.Context, sel ast.SelectionSet, obj *identity.Interface) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, identityImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("Identity")
- case "id":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Identity_id(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "humanId":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Identity_humanId(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "name":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Identity_name(ctx, field, obj)
- wg.Done()
- }(i, field)
- case "email":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Identity_email(ctx, field, obj)
- wg.Done()
- }(i, field)
- case "login":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Identity_login(ctx, field, obj)
- wg.Done()
- }(i, field)
- case "displayName":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Identity_displayName(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "avatarUrl":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Identity_avatarUrl(ctx, field, obj)
- wg.Done()
- }(i, field)
- case "isProtected":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Identity_isProtected(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _Identity_id(ctx context.Context, field graphql.CollectedField, obj *identity.Interface) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Identity",
- Args: nil,
- Field: field,
+ Object: "Identity",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5181,17 +4309,17 @@ func (ec *executionContext) _Identity_id(ctx context.Context, field graphql.Coll
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Identity_humanId(ctx context.Context, field graphql.CollectedField, obj *identity.Interface) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Identity",
- Args: nil,
- Field: field,
+ Object: "Identity",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5208,17 +4336,17 @@ func (ec *executionContext) _Identity_humanId(ctx context.Context, field graphql
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Identity_name(ctx context.Context, field graphql.CollectedField, obj *identity.Interface) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Identity",
- Args: nil,
- Field: field,
+ Object: "Identity",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5232,21 +4360,17 @@ func (ec *executionContext) _Identity_name(ctx context.Context, field graphql.Co
res := resTmp.(*string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
- return graphql.MarshalString(*res)
+ return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Identity_email(ctx context.Context, field graphql.CollectedField, obj *identity.Interface) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Identity",
- Args: nil,
- Field: field,
+ Object: "Identity",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5260,21 +4384,17 @@ func (ec *executionContext) _Identity_email(ctx context.Context, field graphql.C
res := resTmp.(*string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
- return graphql.MarshalString(*res)
+ return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Identity_login(ctx context.Context, field graphql.CollectedField, obj *identity.Interface) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Identity",
- Args: nil,
- Field: field,
+ Object: "Identity",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5288,21 +4408,17 @@ func (ec *executionContext) _Identity_login(ctx context.Context, field graphql.C
res := resTmp.(*string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
- return graphql.MarshalString(*res)
+ return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Identity_displayName(ctx context.Context, field graphql.CollectedField, obj *identity.Interface) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Identity",
- Args: nil,
- Field: field,
+ Object: "Identity",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5319,17 +4435,17 @@ func (ec *executionContext) _Identity_displayName(ctx context.Context, field gra
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Identity_avatarUrl(ctx context.Context, field graphql.CollectedField, obj *identity.Interface) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Identity",
- Args: nil,
- Field: field,
+ Object: "Identity",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5343,21 +4459,17 @@ func (ec *executionContext) _Identity_avatarUrl(ctx context.Context, field graph
res := resTmp.(*string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
- return graphql.MarshalString(*res)
+ return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Identity_isProtected(ctx context.Context, field graphql.CollectedField, obj *identity.Interface) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Identity",
- Args: nil,
- Field: field,
+ Object: "Identity",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5374,62 +4486,17 @@ func (ec *executionContext) _Identity_isProtected(ctx context.Context, field gra
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
-}
-
-var identityConnectionImplementors = []string{"IdentityConnection"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _IdentityConnection(ctx context.Context, sel ast.SelectionSet, obj *models.IdentityConnection) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, identityConnectionImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("IdentityConnection")
- case "edges":
- out.Values[i] = ec._IdentityConnection_edges(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "nodes":
- out.Values[i] = ec._IdentityConnection_nodes(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "pageInfo":
- out.Values[i] = ec._IdentityConnection_pageInfo(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "totalCount":
- out.Values[i] = ec._IdentityConnection_totalCount(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _IdentityConnection_edges(ctx context.Context, field graphql.CollectedField, obj *models.IdentityConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "IdentityConnection",
- Args: nil,
- Field: field,
+ Object: "IdentityConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5446,50 +4513,17 @@ func (ec *executionContext) _IdentityConnection_edges(ctx context.Context, field
res := resTmp.([]models.IdentityEdge)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._IdentityEdge(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNIdentityEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityEdge(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _IdentityConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *models.IdentityConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "IdentityConnection",
- Args: nil,
- Field: field,
+ Object: "IdentityConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5506,50 +4540,17 @@ func (ec *executionContext) _IdentityConnection_nodes(ctx context.Context, field
res := resTmp.([]identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._Identity(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNIdentity2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _IdentityConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *models.IdentityConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "IdentityConnection",
- Args: nil,
- Field: field,
+ Object: "IdentityConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5566,18 +4567,17 @@ func (ec *executionContext) _IdentityConnection_pageInfo(ctx context.Context, fi
res := resTmp.(models.PageInfo)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._PageInfo(ctx, field.Selections, &res)
+ return ec.marshalNPageInfo2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐPageInfo(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _IdentityConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *models.IdentityConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "IdentityConnection",
- Args: nil,
- Field: field,
+ Object: "IdentityConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5594,52 +4594,17 @@ func (ec *executionContext) _IdentityConnection_totalCount(ctx context.Context,
res := resTmp.(int)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalInt(res)
-}
-
-var identityEdgeImplementors = []string{"IdentityEdge"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _IdentityEdge(ctx context.Context, sel ast.SelectionSet, obj *models.IdentityEdge) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, identityEdgeImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("IdentityEdge")
- case "cursor":
- out.Values[i] = ec._IdentityEdge_cursor(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "node":
- out.Values[i] = ec._IdentityEdge_node(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNInt2int(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _IdentityEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *models.IdentityEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "IdentityEdge",
- Args: nil,
- Field: field,
+ Object: "IdentityEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5656,17 +4621,17 @@ func (ec *executionContext) _IdentityEdge_cursor(ctx context.Context, field grap
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _IdentityEdge_node(ctx context.Context, field graphql.CollectedField, obj *models.IdentityEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "IdentityEdge",
- Args: nil,
- Field: field,
+ Object: "IdentityEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5683,73 +4648,17 @@ func (ec *executionContext) _IdentityEdge_node(ctx context.Context, field graphq
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
-}
-
-var labelChangeOperationImplementors = []string{"LabelChangeOperation", "Operation", "Authored"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _LabelChangeOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.LabelChangeOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, labelChangeOperationImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("LabelChangeOperation")
- case "hash":
- out.Values[i] = ec._LabelChangeOperation_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._LabelChangeOperation_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._LabelChangeOperation_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "added":
- out.Values[i] = ec._LabelChangeOperation_added(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "removed":
- out.Values[i] = ec._LabelChangeOperation_removed(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_hash(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeOperation",
- Args: nil,
- Field: field,
+ Object: "LabelChangeOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5766,17 +4675,17 @@ func (ec *executionContext) _LabelChangeOperation_hash(ctx context.Context, fiel
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_author(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeOperation",
- Args: nil,
- Field: field,
+ Object: "LabelChangeOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5793,18 +4702,17 @@ func (ec *executionContext) _LabelChangeOperation_author(ctx context.Context, fi
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_date(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeOperation",
- Args: nil,
- Field: field,
+ Object: "LabelChangeOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5818,20 +4726,20 @@ func (ec *executionContext) _LabelChangeOperation_date(ctx context.Context, fiel
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_added(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeOperation",
- Args: nil,
- Field: field,
+ Object: "LabelChangeOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5848,26 +4756,17 @@ func (ec *executionContext) _LabelChangeOperation_added(ctx context.Context, fie
res := resTmp.([]bug.Label)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNLabel2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeOperation_removed(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeOperation",
- Args: nil,
- Field: field,
+ Object: "LabelChangeOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5884,81 +4783,17 @@ func (ec *executionContext) _LabelChangeOperation_removed(ctx context.Context, f
res := resTmp.([]bug.Label)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
-}
-
-var labelChangeTimelineItemImplementors = []string{"LabelChangeTimelineItem", "TimelineItem"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _LabelChangeTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.LabelChangeTimelineItem) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, labelChangeTimelineItemImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("LabelChangeTimelineItem")
- case "hash":
- out.Values[i] = ec._LabelChangeTimelineItem_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._LabelChangeTimelineItem_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._LabelChangeTimelineItem_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "added":
- out.Values[i] = ec._LabelChangeTimelineItem_added(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "removed":
- out.Values[i] = ec._LabelChangeTimelineItem_removed(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNLabel2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeTimelineItem_hash(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeTimelineItem",
- Args: nil,
- Field: field,
+ Object: "LabelChangeTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -5975,17 +4810,17 @@ func (ec *executionContext) _LabelChangeTimelineItem_hash(ctx context.Context, f
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeTimelineItem_author(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeTimelineItem",
- Args: nil,
- Field: field,
+ Object: "LabelChangeTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6002,18 +4837,17 @@ func (ec *executionContext) _LabelChangeTimelineItem_author(ctx context.Context,
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeTimelineItem_date(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeTimelineItem",
- Args: nil,
- Field: field,
+ Object: "LabelChangeTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6027,20 +4861,20 @@ func (ec *executionContext) _LabelChangeTimelineItem_date(ctx context.Context, f
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeTimelineItem_added(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeTimelineItem",
- Args: nil,
- Field: field,
+ Object: "LabelChangeTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6057,26 +4891,17 @@ func (ec *executionContext) _LabelChangeTimelineItem_added(ctx context.Context,
res := resTmp.([]bug.Label)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNLabel2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _LabelChangeTimelineItem_removed(ctx context.Context, field graphql.CollectedField, obj *bug.LabelChangeTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "LabelChangeTimelineItem",
- Args: nil,
- Field: field,
+ Object: "LabelChangeTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6093,98 +4918,26 @@ func (ec *executionContext) _LabelChangeTimelineItem_removed(ctx context.Context
res := resTmp.([]bug.Label)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return res[idx1]
- }()
- }
-
- return arr1
+ return ec.marshalNLabel2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx, field.Selections, res)
}
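
The pattern throughout this hunk is the same: field resolvers no longer return `res` or call `graphql.MarshalString`/`graphql.MarshalTime` directly, they delegate to type-specific `marshalN…` (required) and `marshalO…` (nullable) helpers that gqlgen now generates. Those helper bodies sit further down in `gen_graph.go` and are not part of this hunk; as a rough, non-authoritative sketch, a required scalar helper presumably has this shape (imports as in the surrounding file):

```go
// Illustrative sketch only: the real generated body is elsewhere in
// gen_graph.go. It folds the old direct graphql.MarshalString call together
// with a non-null check on the marshaled result.
func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
	res := graphql.MarshalString(v)
	if res == graphql.Null {
		// report a non-null violation against the current resolver context
		if !ec.HasError(graphql.GetResolverContext(ctx)) {
			ec.Errorf(ctx, "must not be null")
		}
	}
	return res
}
```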
-var mutationImplementors = []string{"Mutation"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, mutationImplementors)
-
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Object: "Mutation",
- })
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("Mutation")
- case "newBug":
- out.Values[i] = ec._Mutation_newBug(ctx, field)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "addComment":
- out.Values[i] = ec._Mutation_addComment(ctx, field)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "changeLabels":
- out.Values[i] = ec._Mutation_changeLabels(ctx, field)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "open":
- out.Values[i] = ec._Mutation_open(ctx, field)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "close":
- out.Values[i] = ec._Mutation_close(ctx, field)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "setTitle":
- out.Values[i] = ec._Mutation_setTitle(ctx, field)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "commit":
- out.Values[i] = ec._Mutation_commit(ctx, field)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _Mutation_newBug(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Mutation_newBug_args(rawArgs)
+ args, err := ec.field_Mutation_newBug_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Mutation",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -6196,29 +4949,29 @@ func (ec *executionContext) _Mutation_newBug(ctx context.Context, field graphql.
}
return graphql.Null
}
- res := resTmp.(bug.Snapshot)
+ res := resTmp.(*bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Bug(ctx, field.Selections, &res)
+ return ec.marshalNBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Mutation_addComment(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Mutation_addComment_args(rawArgs)
+ args, err := ec.field_Mutation_addComment_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Mutation",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -6230,29 +4983,29 @@ func (ec *executionContext) _Mutation_addComment(ctx context.Context, field grap
}
return graphql.Null
}
- res := resTmp.(bug.Snapshot)
+ res := resTmp.(*bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Bug(ctx, field.Selections, &res)
+ return ec.marshalNBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Mutation_changeLabels(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Mutation_changeLabels_args(rawArgs)
+ args, err := ec.field_Mutation_changeLabels_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Mutation",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -6264,29 +5017,29 @@ func (ec *executionContext) _Mutation_changeLabels(ctx context.Context, field gr
}
return graphql.Null
}
- res := resTmp.(bug.Snapshot)
+ res := resTmp.(*bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Bug(ctx, field.Selections, &res)
+ return ec.marshalNBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Mutation_open(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Mutation_open_args(rawArgs)
+ args, err := ec.field_Mutation_open_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Mutation",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -6298,29 +5051,29 @@ func (ec *executionContext) _Mutation_open(ctx context.Context, field graphql.Co
}
return graphql.Null
}
- res := resTmp.(bug.Snapshot)
+ res := resTmp.(*bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Bug(ctx, field.Selections, &res)
+ return ec.marshalNBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Mutation_close(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Mutation_close_args(rawArgs)
+ args, err := ec.field_Mutation_close_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Mutation",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -6332,29 +5085,29 @@ func (ec *executionContext) _Mutation_close(ctx context.Context, field graphql.C
}
return graphql.Null
}
- res := resTmp.(bug.Snapshot)
+ res := resTmp.(*bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Bug(ctx, field.Selections, &res)
+ return ec.marshalNBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Mutation_setTitle(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Mutation_setTitle_args(rawArgs)
+ args, err := ec.field_Mutation_setTitle_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Mutation",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -6366,29 +5119,29 @@ func (ec *executionContext) _Mutation_setTitle(ctx context.Context, field graphq
}
return graphql.Null
}
- res := resTmp.(bug.Snapshot)
+ res := resTmp.(*bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Bug(ctx, field.Selections, &res)
+ return ec.marshalNBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Mutation_commit(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Mutation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Mutation_commit_args(rawArgs)
+ args, err := ec.field_Mutation_commit_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Mutation",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -6400,66 +5153,20 @@ func (ec *executionContext) _Mutation_commit(ctx context.Context, field graphql.
}
return graphql.Null
}
- res := resTmp.(bug.Snapshot)
+ res := resTmp.(*bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Bug(ctx, field.Selections, &res)
+ return ec.marshalNBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
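
The mutation resolvers also switch from package-level `field_Mutation_*_args(rawArgs)` helpers to methods on the execution context that receive `ctx`, and the `ResolverContext` is now installed before argument parsing, with `rctx.Args = args` filled in afterwards so coercion errors are attributed to the right field. The generated argument builders are outside this hunk; a hedged sketch, using a purely hypothetical `title` argument, might look like:

```go
// Hypothetical sketch of a generated argument builder. The real argument
// names and unmarshal helpers for newBug are defined elsewhere and may differ.
func (ec *executionContext) field_Mutation_newBug_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
	args := map[string]interface{}{}
	if tmp, ok := rawArgs["title"]; ok { // "title" is an assumed example argument
		arg, err := graphql.UnmarshalString(tmp)
		if err != nil {
			return nil, err
		}
		args["title"] = arg
	}
	return args, nil
}
```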
-var operationConnectionImplementors = []string{"OperationConnection"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _OperationConnection(ctx context.Context, sel ast.SelectionSet, obj *models.OperationConnection) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, operationConnectionImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("OperationConnection")
- case "edges":
- out.Values[i] = ec._OperationConnection_edges(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "nodes":
- out.Values[i] = ec._OperationConnection_nodes(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "pageInfo":
- out.Values[i] = ec._OperationConnection_pageInfo(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "totalCount":
- out.Values[i] = ec._OperationConnection_totalCount(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _OperationConnection_edges(ctx context.Context, field graphql.CollectedField, obj *models.OperationConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "OperationConnection",
- Args: nil,
- Field: field,
+ Object: "OperationConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6476,50 +5183,17 @@ func (ec *executionContext) _OperationConnection_edges(ctx context.Context, fiel
res := resTmp.([]models.OperationEdge)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._OperationEdge(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNOperationEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐOperationEdge(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _OperationConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *models.OperationConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "OperationConnection",
- Args: nil,
- Field: field,
+ Object: "OperationConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6536,50 +5210,17 @@ func (ec *executionContext) _OperationConnection_nodes(ctx context.Context, fiel
res := resTmp.([]bug.Operation)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._Operation(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNOperation2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐOperation(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _OperationConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *models.OperationConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "OperationConnection",
- Args: nil,
- Field: field,
+ Object: "OperationConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6596,18 +5237,17 @@ func (ec *executionContext) _OperationConnection_pageInfo(ctx context.Context, f
res := resTmp.(models.PageInfo)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._PageInfo(ctx, field.Selections, &res)
+ return ec.marshalNPageInfo2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐPageInfo(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _OperationConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *models.OperationConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "OperationConnection",
- Args: nil,
- Field: field,
+ Object: "OperationConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6624,52 +5264,17 @@ func (ec *executionContext) _OperationConnection_totalCount(ctx context.Context,
res := resTmp.(int)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalInt(res)
+ return ec.marshalNInt2int(ctx, field.Selections, res)
}
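
The per-element WaitGroup fan-out that used to be inlined in `_OperationConnection_edges` and `_OperationConnection_nodes` is presumably encapsulated in the generated slice helpers (`marshalNOperationEdge2ᚕ…`, `marshalNOperation2ᚕ…`) that the new code calls. A sketch of that shape, reconstructed from the loop this diff removes:

```go
// Sketch of the slice helper now called above, reconstructed from the removed
// inline loop. The real generated body likely delegates through a per-element
// marshaler rather than calling ec._OperationEdge directly.
func (ec *executionContext) marshalNOperationEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐOperationEdge(ctx context.Context, sel ast.SelectionSet, v []models.OperationEdge) graphql.Marshaler {
	ret := make(graphql.Array, len(v))
	var wg sync.WaitGroup
	isLen1 := len(v) == 1
	if !isLen1 {
		wg.Add(len(v))
	}
	for i := range v {
		i := i
		rctx := &graphql.ResolverContext{Index: &i, Result: &v[i]}
		ctx := graphql.WithResolverContext(ctx, rctx)
		f := func(i int) {
			if !isLen1 {
				defer wg.Done()
			}
			ret[i] = ec._OperationEdge(ctx, sel, &v[i])
		}
		if isLen1 {
			f(i) // single element: marshal synchronously
		} else {
			go f(i) // larger lists: fan out, as the removed inline loop did
		}
	}
	wg.Wait()
	return ret
}
```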
-var operationEdgeImplementors = []string{"OperationEdge"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _OperationEdge(ctx context.Context, sel ast.SelectionSet, obj *models.OperationEdge) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, operationEdgeImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("OperationEdge")
- case "cursor":
- out.Values[i] = ec._OperationEdge_cursor(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "node":
- out.Values[i] = ec._OperationEdge_node(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _OperationEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *models.OperationEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "OperationEdge",
- Args: nil,
- Field: field,
+ Object: "OperationEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6686,17 +5291,17 @@ func (ec *executionContext) _OperationEdge_cursor(ctx context.Context, field gra
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _OperationEdge_node(ctx context.Context, field graphql.CollectedField, obj *models.OperationEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "OperationEdge",
- Args: nil,
- Field: field,
+ Object: "OperationEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6713,63 +5318,17 @@ func (ec *executionContext) _OperationEdge_node(ctx context.Context, field graph
res := resTmp.(bug.Operation)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Operation(ctx, field.Selections, &res)
+ return ec.marshalNOperation2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐOperation(ctx, field.Selections, res)
}
-var pageInfoImplementors = []string{"PageInfo"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, obj *models.PageInfo) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, pageInfoImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("PageInfo")
- case "hasNextPage":
- out.Values[i] = ec._PageInfo_hasNextPage(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "hasPreviousPage":
- out.Values[i] = ec._PageInfo_hasPreviousPage(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "startCursor":
- out.Values[i] = ec._PageInfo_startCursor(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "endCursor":
- out.Values[i] = ec._PageInfo_endCursor(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field graphql.CollectedField, obj *models.PageInfo) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "PageInfo",
- Args: nil,
- Field: field,
+ Object: "PageInfo",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6786,17 +5345,17 @@ func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field gra
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField, obj *models.PageInfo) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "PageInfo",
- Args: nil,
- Field: field,
+ Object: "PageInfo",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6813,17 +5372,17 @@ func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *models.PageInfo) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "PageInfo",
- Args: nil,
- Field: field,
+ Object: "PageInfo",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6840,17 +5399,17 @@ func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field gra
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graphql.CollectedField, obj *models.PageInfo) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "PageInfo",
- Args: nil,
- Field: field,
+ Object: "PageInfo",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6867,63 +5426,17 @@ func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graph
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-var queryImplementors = []string{"Query"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, queryImplementors)
-
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Object: "Query",
- })
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("Query")
- case "defaultRepository":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Query_defaultRepository(ctx, field)
- wg.Done()
- }(i, field)
- case "repository":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Query_repository(ctx, field)
- wg.Done()
- }(i, field)
- case "__type":
- out.Values[i] = ec._Query___type(ctx, field)
- case "__schema":
- out.Values[i] = ec._Query___schema(ctx, field)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _Query_defaultRepository(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Query",
- Args: nil,
- Field: field,
+ Object: "Query",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -6937,30 +5450,26 @@ func (ec *executionContext) _Query_defaultRepository(ctx context.Context, field
res := resTmp.(*models.Repository)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec._Repository(ctx, field.Selections, res)
+ return ec.marshalORepository2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐRepository(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Query_repository(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Query",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Query_repository_args(rawArgs)
+ args, err := ec.field_Query_repository_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Query",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -6972,30 +5481,26 @@ func (ec *executionContext) _Query_repository(ctx context.Context, field graphql
res := resTmp.(*models.Repository)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec._Repository(ctx, field.Selections, res)
+ return ec.marshalORepository2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐRepository(ctx, field.Selections, res)
}
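
Nullable results follow the same scheme: the explicit `if res == nil { return graphql.Null }` guards removed above move into the `marshalO…` helpers. A minimal sketch of the nullable object helper called here (the generated body lives elsewhere in `gen_graph.go`):

```go
// Sketch of a nullable ("O"-prefixed) object helper of the kind called above.
func (ec *executionContext) marshalORepository2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐRepository(ctx context.Context, sel ast.SelectionSet, v *models.Repository) graphql.Marshaler {
	if v == nil {
		return graphql.Null // the nil check that used to be inlined at each call site
	}
	return ec._Repository(ctx, sel, v)
}
```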
-// nolint: vetshadow
func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Query",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Query___type_args(rawArgs)
+ args, err := ec.field_Query___type_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Query",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -7007,22 +5512,17 @@ func (ec *executionContext) _Query___type(ctx context.Context, field graphql.Col
res := resTmp.(*introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec.___Type(ctx, field.Selections, res)
+ return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Query",
- Args: nil,
- Field: field,
+ Object: "Query",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7036,92 +5536,26 @@ func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.C
res := resTmp.(*introspection.Schema)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec.___Schema(ctx, field.Selections, res)
-}
-
-var repositoryImplementors = []string{"Repository"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _Repository(ctx context.Context, sel ast.SelectionSet, obj *models.Repository) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, repositoryImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("Repository")
- case "allBugs":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Repository_allBugs(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "bug":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Repository_bug(ctx, field, obj)
- wg.Done()
- }(i, field)
- case "allIdentities":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Repository_allIdentities(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "identity":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Repository_identity(ctx, field, obj)
- wg.Done()
- }(i, field)
- case "userIdentity":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._Repository_userIdentity(ctx, field, obj)
- wg.Done()
- }(i, field)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Repository_allBugs(ctx context.Context, field graphql.CollectedField, obj *models.Repository) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Repository",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Repository_allBugs_args(rawArgs)
+ args, err := ec.field_Repository_allBugs_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Repository",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -7133,29 +5567,29 @@ func (ec *executionContext) _Repository_allBugs(ctx context.Context, field graph
}
return graphql.Null
}
- res := resTmp.(models.BugConnection)
+ res := resTmp.(*models.BugConnection)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._BugConnection(ctx, field.Selections, &res)
+ return ec.marshalNBugConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐBugConnection(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Repository_bug(ctx context.Context, field graphql.CollectedField, obj *models.Repository) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Repository",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Repository_bug_args(rawArgs)
+ args, err := ec.field_Repository_bug_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Repository",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -7167,30 +5601,26 @@ func (ec *executionContext) _Repository_bug(ctx context.Context, field graphql.C
res := resTmp.(*bug.Snapshot)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec._Bug(ctx, field.Selections, res)
+ return ec.marshalOBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Repository_allIdentities(ctx context.Context, field graphql.CollectedField, obj *models.Repository) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Repository",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Repository_allIdentities_args(rawArgs)
+ args, err := ec.field_Repository_allIdentities_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Repository",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -7202,29 +5632,29 @@ func (ec *executionContext) _Repository_allIdentities(ctx context.Context, field
}
return graphql.Null
}
- res := resTmp.(models.IdentityConnection)
+ res := resTmp.(*models.IdentityConnection)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._IdentityConnection(ctx, field.Selections, &res)
+ return ec.marshalNIdentityConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityConnection(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Repository_identity(ctx context.Context, field graphql.CollectedField, obj *models.Repository) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "Repository",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field_Repository_identity_args(rawArgs)
+ args, err := ec.field_Repository_identity_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "Repository",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -7233,25 +5663,20 @@ func (ec *executionContext) _Repository_identity(ctx context.Context, field grap
if resTmp == nil {
return graphql.Null
}
- res := resTmp.(*identity.Interface)
+ res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec._Identity(ctx, field.Selections, res)
+ return ec.marshalOIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _Repository_userIdentity(ctx context.Context, field graphql.CollectedField, obj *models.Repository) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "Repository",
- Args: nil,
- Field: field,
+ Object: "Repository",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7262,79 +5687,20 @@ func (ec *executionContext) _Repository_userIdentity(ctx context.Context, field
if resTmp == nil {
return graphql.Null
}
- res := resTmp.(*identity.Interface)
+ res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec._Identity(ctx, field.Selections, res)
+ return ec.marshalOIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-var setStatusOperationImplementors = []string{"SetStatusOperation", "Operation", "Authored"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _SetStatusOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.SetStatusOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, setStatusOperationImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("SetStatusOperation")
- case "hash":
- out.Values[i] = ec._SetStatusOperation_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._SetStatusOperation_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._SetStatusOperation_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "status":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._SetStatusOperation_status(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _SetStatusOperation_hash(ctx context.Context, field graphql.CollectedField, obj *bug.SetStatusOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetStatusOperation",
- Args: nil,
- Field: field,
+ Object: "SetStatusOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7351,17 +5717,17 @@ func (ec *executionContext) _SetStatusOperation_hash(ctx context.Context, field
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetStatusOperation_author(ctx context.Context, field graphql.CollectedField, obj *bug.SetStatusOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetStatusOperation",
- Args: nil,
- Field: field,
+ Object: "SetStatusOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7378,18 +5744,17 @@ func (ec *executionContext) _SetStatusOperation_author(ctx context.Context, fiel
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetStatusOperation_date(ctx context.Context, field graphql.CollectedField, obj *bug.SetStatusOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetStatusOperation",
- Args: nil,
- Field: field,
+ Object: "SetStatusOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7403,20 +5768,20 @@ func (ec *executionContext) _SetStatusOperation_date(ctx context.Context, field
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetStatusOperation_status(ctx context.Context, field graphql.CollectedField, obj *bug.SetStatusOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetStatusOperation",
- Args: nil,
- Field: field,
+ Object: "SetStatusOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7433,71 +5798,17 @@ func (ec *executionContext) _SetStatusOperation_status(ctx context.Context, fiel
res := resTmp.(models.Status)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
-}
-
-var setStatusTimelineItemImplementors = []string{"SetStatusTimelineItem", "TimelineItem"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _SetStatusTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.SetStatusTimelineItem) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, setStatusTimelineItemImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("SetStatusTimelineItem")
- case "hash":
- out.Values[i] = ec._SetStatusTimelineItem_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._SetStatusTimelineItem_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._SetStatusTimelineItem_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "status":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._SetStatusTimelineItem_status(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNStatus2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐStatus(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetStatusTimelineItem_hash(ctx context.Context, field graphql.CollectedField, obj *bug.SetStatusTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetStatusTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetStatusTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7514,17 +5825,17 @@ func (ec *executionContext) _SetStatusTimelineItem_hash(ctx context.Context, fie
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetStatusTimelineItem_author(ctx context.Context, field graphql.CollectedField, obj *bug.SetStatusTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetStatusTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetStatusTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7541,18 +5852,17 @@ func (ec *executionContext) _SetStatusTimelineItem_author(ctx context.Context, f
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetStatusTimelineItem_date(ctx context.Context, field graphql.CollectedField, obj *bug.SetStatusTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetStatusTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetStatusTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7566,20 +5876,20 @@ func (ec *executionContext) _SetStatusTimelineItem_date(ctx context.Context, fie
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetStatusTimelineItem_status(ctx context.Context, field graphql.CollectedField, obj *bug.SetStatusTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetStatusTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetStatusTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7596,72 +5906,17 @@ func (ec *executionContext) _SetStatusTimelineItem_status(ctx context.Context, f
res := resTmp.(models.Status)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
-}
-
-var setTitleOperationImplementors = []string{"SetTitleOperation", "Operation", "Authored"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _SetTitleOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.SetTitleOperation) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, setTitleOperationImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("SetTitleOperation")
- case "hash":
- out.Values[i] = ec._SetTitleOperation_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._SetTitleOperation_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._SetTitleOperation_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "title":
- out.Values[i] = ec._SetTitleOperation_title(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "was":
- out.Values[i] = ec._SetTitleOperation_was(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNStatus2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐStatus(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_hash(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleOperation",
- Args: nil,
- Field: field,
+ Object: "SetTitleOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7678,17 +5933,17 @@ func (ec *executionContext) _SetTitleOperation_hash(ctx context.Context, field g
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_author(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleOperation",
- Args: nil,
- Field: field,
+ Object: "SetTitleOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7705,18 +5960,17 @@ func (ec *executionContext) _SetTitleOperation_author(ctx context.Context, field
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_date(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleOperation",
- Args: nil,
- Field: field,
+ Object: "SetTitleOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7730,20 +5984,20 @@ func (ec *executionContext) _SetTitleOperation_date(ctx context.Context, field g
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_title(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleOperation",
- Args: nil,
- Field: field,
+ Object: "SetTitleOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7760,17 +6014,17 @@ func (ec *executionContext) _SetTitleOperation_title(ctx context.Context, field
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleOperation_was(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleOperation) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleOperation",
- Args: nil,
- Field: field,
+ Object: "SetTitleOperation",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7787,72 +6041,17 @@ func (ec *executionContext) _SetTitleOperation_was(ctx context.Context, field gr
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-var setTitleTimelineItemImplementors = []string{"SetTitleTimelineItem", "TimelineItem"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _SetTitleTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.SetTitleTimelineItem) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, setTitleTimelineItemImplementors)
-
- var wg sync.WaitGroup
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("SetTitleTimelineItem")
- case "hash":
- out.Values[i] = ec._SetTitleTimelineItem_hash(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "author":
- out.Values[i] = ec._SetTitleTimelineItem_author(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "date":
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- out.Values[i] = ec._SetTitleTimelineItem_date(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- wg.Done()
- }(i, field)
- case "title":
- out.Values[i] = ec._SetTitleTimelineItem_title(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "was":
- out.Values[i] = ec._SetTitleTimelineItem_was(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- wg.Wait()
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _SetTitleTimelineItem_hash(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetTitleTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7869,17 +6068,17 @@ func (ec *executionContext) _SetTitleTimelineItem_hash(ctx context.Context, fiel
res := resTmp.(git.Hash)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return res
+ return ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleTimelineItem_author(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetTitleTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7896,18 +6095,17 @@ func (ec *executionContext) _SetTitleTimelineItem_author(ctx context.Context, fi
res := resTmp.(identity.Interface)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._Identity(ctx, field.Selections, &res)
+ return ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleTimelineItem_date(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetTitleTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7921,20 +6119,20 @@ func (ec *executionContext) _SetTitleTimelineItem_date(ctx context.Context, fiel
}
return graphql.Null
}
- res := resTmp.(time.Time)
+ res := resTmp.(*time.Time)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalTime(res)
+ return ec.marshalNTime2ᚖtimeᚐTime(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleTimelineItem_title(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetTitleTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7951,17 +6149,17 @@ func (ec *executionContext) _SetTitleTimelineItem_title(ctx context.Context, fie
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _SetTitleTimelineItem_was(ctx context.Context, field graphql.CollectedField, obj *bug.SetTitleTimelineItem) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "SetTitleTimelineItem",
- Args: nil,
- Field: field,
+ Object: "SetTitleTimelineItem",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -7978,62 +6176,17 @@ func (ec *executionContext) _SetTitleTimelineItem_was(ctx context.Context, field
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-var timelineItemConnectionImplementors = []string{"TimelineItemConnection"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _TimelineItemConnection(ctx context.Context, sel ast.SelectionSet, obj *models.TimelineItemConnection) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, timelineItemConnectionImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("TimelineItemConnection")
- case "edges":
- out.Values[i] = ec._TimelineItemConnection_edges(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "nodes":
- out.Values[i] = ec._TimelineItemConnection_nodes(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "pageInfo":
- out.Values[i] = ec._TimelineItemConnection_pageInfo(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "totalCount":
- out.Values[i] = ec._TimelineItemConnection_totalCount(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _TimelineItemConnection_edges(ctx context.Context, field graphql.CollectedField, obj *models.TimelineItemConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "TimelineItemConnection",
- Args: nil,
- Field: field,
+ Object: "TimelineItemConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8050,50 +6203,17 @@ func (ec *executionContext) _TimelineItemConnection_edges(ctx context.Context, f
res := resTmp.([]models.TimelineItemEdge)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._TimelineItemEdge(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNTimelineItemEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐTimelineItemEdge(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _TimelineItemConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *models.TimelineItemConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "TimelineItemConnection",
- Args: nil,
- Field: field,
+ Object: "TimelineItemConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8110,50 +6230,17 @@ func (ec *executionContext) _TimelineItemConnection_nodes(ctx context.Context, f
res := resTmp.([]bug.TimelineItem)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec._TimelineItem(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalNTimelineItem2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐTimelineItem(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _TimelineItemConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *models.TimelineItemConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "TimelineItemConnection",
- Args: nil,
- Field: field,
+ Object: "TimelineItemConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8170,18 +6257,17 @@ func (ec *executionContext) _TimelineItemConnection_pageInfo(ctx context.Context
res := resTmp.(models.PageInfo)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._PageInfo(ctx, field.Selections, &res)
+ return ec.marshalNPageInfo2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐPageInfo(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _TimelineItemConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *models.TimelineItemConnection) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "TimelineItemConnection",
- Args: nil,
- Field: field,
+ Object: "TimelineItemConnection",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8198,52 +6284,17 @@ func (ec *executionContext) _TimelineItemConnection_totalCount(ctx context.Conte
res := resTmp.(int)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalInt(res)
+ return ec.marshalNInt2int(ctx, field.Selections, res)
}
-var timelineItemEdgeImplementors = []string{"TimelineItemEdge"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) _TimelineItemEdge(ctx context.Context, sel ast.SelectionSet, obj *models.TimelineItemEdge) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, timelineItemEdgeImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("TimelineItemEdge")
- case "cursor":
- out.Values[i] = ec._TimelineItemEdge_cursor(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "node":
- out.Values[i] = ec._TimelineItemEdge_node(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) _TimelineItemEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *models.TimelineItemEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "TimelineItemEdge",
- Args: nil,
- Field: field,
+ Object: "TimelineItemEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8260,17 +6311,17 @@ func (ec *executionContext) _TimelineItemEdge_cursor(ctx context.Context, field
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) _TimelineItemEdge_node(ctx context.Context, field graphql.CollectedField, obj *models.TimelineItemEdge) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "TimelineItemEdge",
- Args: nil,
- Field: field,
+ Object: "TimelineItemEdge",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8287,60 +6338,17 @@ func (ec *executionContext) _TimelineItemEdge_node(ctx context.Context, field gr
res := resTmp.(bug.TimelineItem)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- return ec._TimelineItem(ctx, field.Selections, &res)
-}
-
-var __DirectiveImplementors = []string{"__Directive"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, __DirectiveImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("__Directive")
- case "name":
- out.Values[i] = ec.___Directive_name(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "description":
- out.Values[i] = ec.___Directive_description(ctx, field, obj)
- case "locations":
- out.Values[i] = ec.___Directive_locations(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "args":
- out.Values[i] = ec.___Directive_args(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
+ return ec.marshalNTimelineItem2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐTimelineItem(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Directive",
- Args: nil,
- Field: field,
+ Object: "__Directive",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8357,17 +6365,17 @@ func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Directive_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Directive",
- Args: nil,
- Field: field,
+ Object: "__Directive",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8381,17 +6389,17 @@ func (ec *executionContext) ___Directive_description(ctx context.Context, field
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalOString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Directive",
- Args: nil,
- Field: field,
+ Object: "__Directive",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8408,26 +6416,17 @@ func (ec *executionContext) ___Directive_locations(ctx context.Context, field gr
res := resTmp.([]string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
-
- for idx1 := range res {
- arr1[idx1] = func() graphql.Marshaler {
- return graphql.MarshalString(res[idx1])
- }()
- }
-
- return arr1
+ return ec.marshalN__DirectiveLocation2ᚕstring(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Directive",
- Args: nil,
- Field: field,
+ Object: "__Directive",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8444,89 +6443,17 @@ func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql
res := resTmp.([]introspection.InputValue)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___InputValue(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, field.Selections, res)
}
-var __EnumValueImplementors = []string{"__EnumValue"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, __EnumValueImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("__EnumValue")
- case "name":
- out.Values[i] = ec.___EnumValue_name(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "description":
- out.Values[i] = ec.___EnumValue_description(ctx, field, obj)
- case "isDeprecated":
- out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "deprecationReason":
- out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__EnumValue",
- Args: nil,
- Field: field,
+ Object: "__EnumValue",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8543,17 +6470,17 @@ func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__EnumValue",
- Args: nil,
- Field: field,
+ Object: "__EnumValue",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8567,17 +6494,17 @@ func (ec *executionContext) ___EnumValue_description(ctx context.Context, field
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalOString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__EnumValue",
- Args: nil,
- Field: field,
+ Object: "__EnumValue",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8594,17 +6521,17 @@ func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__EnumValue",
- Args: nil,
- Field: field,
+ Object: "__EnumValue",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8618,70 +6545,17 @@ func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context,
res := resTmp.(*string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
- return graphql.MarshalString(*res)
+ return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
-var __FieldImplementors = []string{"__Field"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, __FieldImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("__Field")
- case "name":
- out.Values[i] = ec.___Field_name(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "description":
- out.Values[i] = ec.___Field_description(ctx, field, obj)
- case "args":
- out.Values[i] = ec.___Field_args(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "type":
- out.Values[i] = ec.___Field_type(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "isDeprecated":
- out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "deprecationReason":
- out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Field",
- Args: nil,
- Field: field,
+ Object: "__Field",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8698,17 +6572,17 @@ func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.Col
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Field",
- Args: nil,
- Field: field,
+ Object: "__Field",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8722,17 +6596,17 @@ func (ec *executionContext) ___Field_description(ctx context.Context, field grap
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalOString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Field",
- Args: nil,
- Field: field,
+ Object: "__Field",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8749,50 +6623,17 @@ func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.Col
res := resTmp.([]introspection.InputValue)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___InputValue(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Field",
- Args: nil,
- Field: field,
+ Object: "__Field",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8809,25 +6650,17 @@ func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.Col
res := resTmp.(*introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- if !ec.HasError(rctx) {
- ec.Errorf(ctx, "must not be null")
- }
- return graphql.Null
- }
-
- return ec.___Type(ctx, field.Selections, res)
+ return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Field",
- Args: nil,
- Field: field,
+ Object: "__Field",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8844,17 +6677,17 @@ func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field gra
res := resTmp.(bool)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalBoolean(res)
+ return ec.marshalNBoolean2bool(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Field",
- Args: nil,
- Field: field,
+ Object: "__Field",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8868,60 +6701,17 @@ func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, fiel
res := resTmp.(*string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
- return graphql.MarshalString(*res)
+ return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
-var __InputValueImplementors = []string{"__InputValue"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, __InputValueImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("__InputValue")
- case "name":
- out.Values[i] = ec.___InputValue_name(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "description":
- out.Values[i] = ec.___InputValue_description(ctx, field, obj)
- case "type":
- out.Values[i] = ec.___InputValue_type(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "defaultValue":
- out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__InputValue",
- Args: nil,
- Field: field,
+ Object: "__InputValue",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8938,17 +6728,17 @@ func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphq
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalNString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___InputValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__InputValue",
- Args: nil,
- Field: field,
+ Object: "__InputValue",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8962,17 +6752,17 @@ func (ec *executionContext) ___InputValue_description(ctx context.Context, field
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalOString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__InputValue",
- Args: nil,
- Field: field,
+ Object: "__InputValue",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -8989,25 +6779,17 @@ func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphq
res := resTmp.(*introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- if !ec.HasError(rctx) {
- ec.Errorf(ctx, "must not be null")
- }
- return graphql.Null
- }
-
- return ec.___Type(ctx, field.Selections, res)
+ return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__InputValue",
- Args: nil,
- Field: field,
+ Object: "__InputValue",
+ Field: field,
+ Args: nil,
+ IsMethod: false,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9021,65 +6803,17 @@ func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, fiel
res := resTmp.(*string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
- return graphql.MarshalString(*res)
+ return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
-var __SchemaImplementors = []string{"__Schema"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, __SchemaImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("__Schema")
- case "types":
- out.Values[i] = ec.___Schema_types(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "queryType":
- out.Values[i] = ec.___Schema_queryType(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "mutationType":
- out.Values[i] = ec.___Schema_mutationType(ctx, field, obj)
- case "subscriptionType":
- out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj)
- case "directives":
- out.Values[i] = ec.___Schema_directives(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Schema",
- Args: nil,
- Field: field,
+ Object: "__Schema",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9096,50 +6830,17 @@ func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.C
res := resTmp.([]introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___Type(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalN__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Schema",
- Args: nil,
- Field: field,
+ Object: "__Schema",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9156,25 +6857,17 @@ func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graph
res := resTmp.(*introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- if !ec.HasError(rctx) {
- ec.Errorf(ctx, "must not be null")
- }
- return graphql.Null
- }
-
- return ec.___Type(ctx, field.Selections, res)
+ return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Schema",
- Args: nil,
- Field: field,
+ Object: "__Schema",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9188,22 +6881,17 @@ func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field gr
res := resTmp.(*introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec.___Type(ctx, field.Selections, res)
+ return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Schema",
- Args: nil,
- Field: field,
+ Object: "__Schema",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9217,22 +6905,17 @@ func (ec *executionContext) ___Schema_subscriptionType(ctx context.Context, fiel
res := resTmp.(*introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
-
- return ec.___Type(ctx, field.Selections, res)
+ return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Schema_directives(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Schema",
- Args: nil,
- Field: field,
+ Object: "__Schema",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9249,96 +6932,17 @@ func (ec *executionContext) ___Schema_directives(ctx context.Context, field grap
res := resTmp.([]introspection.Directive)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___Directive(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalN__Directive2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx, field.Selections, res)
}
-var __TypeImplementors = []string{"__Type"}
-
-// nolint: gocyclo, errcheck, gas, goconst
-func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, __TypeImplementors)
-
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString("__Type")
- case "kind":
- out.Values[i] = ec.___Type_kind(ctx, field, obj)
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- case "name":
- out.Values[i] = ec.___Type_name(ctx, field, obj)
- case "description":
- out.Values[i] = ec.___Type_description(ctx, field, obj)
- case "fields":
- out.Values[i] = ec.___Type_fields(ctx, field, obj)
- case "interfaces":
- out.Values[i] = ec.___Type_interfaces(ctx, field, obj)
- case "possibleTypes":
- out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj)
- case "enumValues":
- out.Values[i] = ec.___Type_enumValues(ctx, field, obj)
- case "inputFields":
- out.Values[i] = ec.___Type_inputFields(ctx, field, obj)
- case "ofType":
- out.Values[i] = ec.___Type_ofType(ctx, field, obj)
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
-
- if invalid {
- return graphql.Null
- }
- return out
-}
-
-// nolint: vetshadow
func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: nil,
- Field: field,
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9355,17 +6959,17 @@ func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.Coll
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalN__TypeKind2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: nil,
- Field: field,
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9379,21 +6983,17 @@ func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.Coll
res := resTmp.(*string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- if res == nil {
- return graphql.Null
- }
- return graphql.MarshalString(*res)
+ return ec.marshalOString2ᚖstring(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Type_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: nil,
- Field: field,
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9407,25 +7007,26 @@ func (ec *executionContext) ___Type_description(ctx context.Context, field graph
res := resTmp.(string)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- return graphql.MarshalString(res)
+ return ec.marshalOString2string(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field___Type_fields_args(rawArgs)
+ args, err := ec.field___Type_fields_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -9437,50 +7038,17 @@ func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.Co
res := resTmp.([]introspection.Field)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___Field(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: nil,
- Field: field,
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9494,50 +7062,17 @@ func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphq
res := resTmp.([]introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___Type(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: nil,
- Field: field,
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9551,58 +7086,26 @@ func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field gra
res := resTmp.([]introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___Type(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := field___Type_enumValues_args(rawArgs)
+ args, err := ec.field___Type_enumValues_args(ctx, rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
- rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: args,
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
+ rctx.Args = args
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
@@ -9614,50 +7117,17 @@ func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphq
res := resTmp.([]introspection.EnumValue)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___EnumValue(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: nil,
- Field: field,
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9671,50 +7141,17 @@ func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graph
res := resTmp.([]introspection.InputValue)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
-
- arr1 := make(graphql.Array, len(res))
- var wg sync.WaitGroup
-
- isLen1 := len(res) == 1
- if !isLen1 {
- wg.Add(len(res))
- }
-
- for idx1 := range res {
- idx1 := idx1
- rctx := &graphql.ResolverContext{
- Index: &idx1,
- Result: &res[idx1],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func(idx1 int) {
- if !isLen1 {
- defer wg.Done()
- }
- arr1[idx1] = func() graphql.Marshaler {
-
- return ec.___InputValue(ctx, field.Selections, &res[idx1])
- }()
- }
- if isLen1 {
- f(idx1)
- } else {
- go f(idx1)
- }
-
- }
- wg.Wait()
- return arr1
+ return ec.marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, field.Selections, res)
}
-// nolint: vetshadow
func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
rctx := &graphql.ResolverContext{
- Object: "__Type",
- Args: nil,
- Field: field,
+ Object: "__Type",
+ Field: field,
+ Args: nil,
+ IsMethod: true,
}
ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
@@ -9728,13 +7165,16 @@ func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.Co
res := resTmp.(*introspection.Type)
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- if res == nil {
- return graphql.Null
- }
- return ec.___Type(ctx, field.Selections, res)
-}
+ return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res)
+}
+// endregion **************************** field.gotpl *****************************
+// region **************************** input.gotpl *****************************
+
+// endregion **************************** input.gotpl *****************************
+
+// region ************************** interface.gotpl ***************************
func (ec *executionContext) _Authored(ctx context.Context, sel ast.SelectionSet, obj *models.Authored) graphql.Marshaler {
switch obj := (*obj).(type) {
@@ -9807,447 +7247,3134 @@ func (ec *executionContext) _TimelineItem(ctx context.Context, sel ast.Selection
}
}
-func (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}, next graphql.Resolver) (ret interface{}) {
- defer func() {
- if r := recover(); r != nil {
- ec.Error(ctx, ec.Recover(ctx, r))
- ret = nil
+// endregion ************************** interface.gotpl ***************************
+
+// region **************************** object.gotpl ****************************
+
+var addCommentOperationImplementors = []string{"AddCommentOperation", "Operation", "Authored"}
+
+func (ec *executionContext) _AddCommentOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.AddCommentOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, addCommentOperationImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("AddCommentOperation")
+ case "hash":
+ out.Values[i] = ec._AddCommentOperation_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._AddCommentOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._AddCommentOperation_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "message":
+ out.Values[i] = ec._AddCommentOperation_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "files":
+ out.Values[i] = ec._AddCommentOperation_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
}
- }()
- res, err := ec.ResolverMiddleware(ctx, next)
- if err != nil {
- ec.Error(ctx, err)
- return nil
}
- return res
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-func (ec *executionContext) introspectSchema() (*introspection.Schema, error) {
- if ec.DisableIntrospection {
- return nil, errors.New("introspection disabled")
+var addCommentTimelineItemImplementors = []string{"AddCommentTimelineItem", "TimelineItem"}
+
+func (ec *executionContext) _AddCommentTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.AddCommentTimelineItem) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, addCommentTimelineItemImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("AddCommentTimelineItem")
+ case "hash":
+ out.Values[i] = ec._AddCommentTimelineItem_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._AddCommentTimelineItem_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "message":
+ out.Values[i] = ec._AddCommentTimelineItem_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "messageIsEmpty":
+ out.Values[i] = ec._AddCommentTimelineItem_messageIsEmpty(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "files":
+ out.Values[i] = ec._AddCommentTimelineItem_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "createdAt":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._AddCommentTimelineItem_createdAt(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "lastEdit":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._AddCommentTimelineItem_lastEdit(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "edited":
+ out.Values[i] = ec._AddCommentTimelineItem_edited(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "history":
+ out.Values[i] = ec._AddCommentTimelineItem_history(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
}
- return introspection.WrapSchema(parsedSchema), nil
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-func (ec *executionContext) introspectType(name string) (*introspection.Type, error) {
- if ec.DisableIntrospection {
- return nil, errors.New("introspection disabled")
+var bugImplementors = []string{"Bug"}
+
+func (ec *executionContext) _Bug(ctx context.Context, sel ast.SelectionSet, obj *bug.Snapshot) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, bugImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("Bug")
+ case "id":
+ out.Values[i] = ec._Bug_id(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "humanId":
+ out.Values[i] = ec._Bug_humanId(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "status":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Bug_status(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "title":
+ out.Values[i] = ec._Bug_title(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "labels":
+ out.Values[i] = ec._Bug_labels(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._Bug_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "createdAt":
+ out.Values[i] = ec._Bug_createdAt(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "lastEdit":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Bug_lastEdit(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "actors":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Bug_actors(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "participants":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Bug_participants(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "comments":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Bug_comments(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "timeline":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Bug_timeline(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "operations":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Bug_operations(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
}
- return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
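// Every resolver-backed field above (status, lastEdit, actors, comments, ...)
// is evaluated through out.Concurrently, and each case first rebinds the loop
// variable with `field := field`. Before Go 1.22 the range variable is shared
// by all iterations, so without that rebinding every goroutine launched from
// the loop could observe only the last field. A minimal illustration of the
// pitfall and the fix (not generated code, just the idiom in isolation):
package main

import (
	"fmt"
	"sync"
)

func main() {
	fields := []string{"id", "title", "status"}

	var wg sync.WaitGroup
	for _, field := range fields {
		field := field // same trick as the generated marshalers
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(field) // prints each field exactly once
		}()
	}
	wg.Wait()
}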
-var parsedSchema = gqlparser.MustLoadSchema(
- &ast.Source{Name: "schema/bug.graphql", Input: `"""Represents a comment on a bug."""
-type Comment implements Authored {
- """The author of this comment."""
- author: Identity!
+var bugConnectionImplementors = []string{"BugConnection"}
- """The message of this comment."""
- message: String!
+func (ec *executionContext) _BugConnection(ctx context.Context, sel ast.SelectionSet, obj *models.BugConnection) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, bugConnectionImplementors)
- """All media's hash referenced in this comment"""
- files: [Hash!]!
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("BugConnection")
+ case "edges":
+ out.Values[i] = ec._BugConnection_edges(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "nodes":
+ out.Values[i] = ec._BugConnection_nodes(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "pageInfo":
+ out.Values[i] = ec._BugConnection_pageInfo(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "totalCount":
+ out.Values[i] = ec._BugConnection_totalCount(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type CommentConnection {
- edges: [CommentEdge!]!
- nodes: [Comment!]!
- pageInfo: PageInfo!
- totalCount: Int!
+var bugEdgeImplementors = []string{"BugEdge"}
+
+func (ec *executionContext) _BugEdge(ctx context.Context, sel ast.SelectionSet, obj *models.BugEdge) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, bugEdgeImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("BugEdge")
+ case "cursor":
+ out.Values[i] = ec._BugEdge_cursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "node":
+ out.Values[i] = ec._BugEdge_node(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type CommentEdge {
- cursor: String!
- node: Comment!
+var commentImplementors = []string{"Comment", "Authored"}
+
+func (ec *executionContext) _Comment(ctx context.Context, sel ast.SelectionSet, obj *bug.Comment) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, commentImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("Comment")
+ case "author":
+ out.Values[i] = ec._Comment_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "message":
+ out.Values[i] = ec._Comment_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "files":
+ out.Values[i] = ec._Comment_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-enum Status {
- OPEN
- CLOSED
+var commentConnectionImplementors = []string{"CommentConnection"}
+
+func (ec *executionContext) _CommentConnection(ctx context.Context, sel ast.SelectionSet, obj *models.CommentConnection) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, commentConnectionImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("CommentConnection")
+ case "edges":
+ out.Values[i] = ec._CommentConnection_edges(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "nodes":
+ out.Values[i] = ec._CommentConnection_nodes(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "pageInfo":
+ out.Values[i] = ec._CommentConnection_pageInfo(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "totalCount":
+ out.Values[i] = ec._CommentConnection_totalCount(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type Bug {
- """The identifier for this bug"""
- id: String!
- """The human version (truncated) identifier for this bug"""
- humanId: String!
- status: Status!
- title: String!
- labels: [Label!]!
- author: Identity!
- createdAt: Time!
- lastEdit: Time!
+var commentEdgeImplementors = []string{"CommentEdge"}
- """The actors of the bug. Actors are Identity that have interacted with the bug."""
- actors(
- """Returns the elements in the list that come after the specified cursor."""
- after: String
- """Returns the elements in the list that come before the specified cursor."""
- before: String
- """Returns the first _n_ elements from the list."""
- first: Int
- """Returns the last _n_ elements from the list."""
- last: Int
- ): IdentityConnection!
+func (ec *executionContext) _CommentEdge(ctx context.Context, sel ast.SelectionSet, obj *models.CommentEdge) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, commentEdgeImplementors)
- """The participants of the bug. Participants are Identity that have created or
- added a comment on the bug."""
- participants(
- """Returns the elements in the list that come after the specified cursor."""
- after: String
- """Returns the elements in the list that come before the specified cursor."""
- before: String
- """Returns the first _n_ elements from the list."""
- first: Int
- """Returns the last _n_ elements from the list."""
- last: Int
- ): IdentityConnection!
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("CommentEdge")
+ case "cursor":
+ out.Values[i] = ec._CommentEdge_cursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "node":
+ out.Values[i] = ec._CommentEdge_node(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
+}
- comments(
- """Returns the elements in the list that come after the specified cursor."""
- after: String
- """Returns the elements in the list that come before the specified cursor."""
- before: String
- """Returns the first _n_ elements from the list."""
- first: Int
- """Returns the last _n_ elements from the list."""
- last: Int
- ): CommentConnection!
+var commentHistoryStepImplementors = []string{"CommentHistoryStep"}
- timeline(
- """Returns the elements in the list that come after the specified cursor."""
- after: String
- """Returns the elements in the list that come before the specified cursor."""
- before: String
- """Returns the first _n_ elements from the list."""
- first: Int
- """Returns the last _n_ elements from the list."""
- last: Int
- ): TimelineItemConnection!
+func (ec *executionContext) _CommentHistoryStep(ctx context.Context, sel ast.SelectionSet, obj *bug.CommentHistoryStep) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, commentHistoryStepImplementors)
- operations(
- """Returns the elements in the list that come after the specified cursor."""
- after: String
- """Returns the elements in the list that come before the specified cursor."""
- before: String
- """Returns the first _n_ elements from the list."""
- first: Int
- """Returns the last _n_ elements from the list."""
- last: Int
- ): OperationConnection!
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("CommentHistoryStep")
+ case "message":
+ out.Values[i] = ec._CommentHistoryStep_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._CommentHistoryStep_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""The connection type for Bug."""
-type BugConnection {
- """A list of edges."""
- edges: [BugEdge!]!
- nodes: [Bug!]!
- """Information to aid in pagination."""
- pageInfo: PageInfo!
- """Identifies the total count of items in the connection."""
- totalCount: Int!
+var createOperationImplementors = []string{"CreateOperation", "Operation", "Authored"}
+
+func (ec *executionContext) _CreateOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.CreateOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, createOperationImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("CreateOperation")
+ case "hash":
+ out.Values[i] = ec._CreateOperation_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._CreateOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._CreateOperation_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "title":
+ out.Values[i] = ec._CreateOperation_title(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "message":
+ out.Values[i] = ec._CreateOperation_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "files":
+ out.Values[i] = ec._CreateOperation_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""An edge in a connection."""
-type BugEdge {
- """A cursor for use in pagination."""
- cursor: String!
- """The item at the end of the edge."""
- node: Bug!
+var createTimelineItemImplementors = []string{"CreateTimelineItem", "TimelineItem"}
+
+func (ec *executionContext) _CreateTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.CreateTimelineItem) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, createTimelineItemImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("CreateTimelineItem")
+ case "hash":
+ out.Values[i] = ec._CreateTimelineItem_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._CreateTimelineItem_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "message":
+ out.Values[i] = ec._CreateTimelineItem_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "messageIsEmpty":
+ out.Values[i] = ec._CreateTimelineItem_messageIsEmpty(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "files":
+ out.Values[i] = ec._CreateTimelineItem_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "createdAt":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._CreateTimelineItem_createdAt(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "lastEdit":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._CreateTimelineItem_lastEdit(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "edited":
+ out.Values[i] = ec._CreateTimelineItem_edited(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "history":
+ out.Values[i] = ec._CreateTimelineItem_history(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-`},
- &ast.Source{Name: "schema/identity.graphql", Input: `"""Represents an identity"""
-type Identity {
- """The identifier for this identity"""
- id: String!
- """The human version (truncated) identifier for this identity"""
- humanId: String!
- """The name of the person, if known."""
- name: String
- """The email of the person, if known."""
- email: String
- """The login of the person, if known."""
- login: String
- """A string containing the either the name of the person, its login or both"""
- displayName: String!
- """An url to an avatar"""
- avatarUrl: String
- """isProtected is true if the chain of git commits started to be signed.
- If that's the case, only signed commit with a valid key for this identity can be added."""
- isProtected: Boolean!
+var editCommentOperationImplementors = []string{"EditCommentOperation", "Operation", "Authored"}
+
+func (ec *executionContext) _EditCommentOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.EditCommentOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, editCommentOperationImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("EditCommentOperation")
+ case "hash":
+ out.Values[i] = ec._EditCommentOperation_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._EditCommentOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._EditCommentOperation_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "target":
+ out.Values[i] = ec._EditCommentOperation_target(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "message":
+ out.Values[i] = ec._EditCommentOperation_message(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "files":
+ out.Values[i] = ec._EditCommentOperation_files(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type IdentityConnection {
- edges: [IdentityEdge!]!
- nodes: [Identity!]!
- pageInfo: PageInfo!
- totalCount: Int!
+var identityImplementors = []string{"Identity"}
+
+func (ec *executionContext) _Identity(ctx context.Context, sel ast.SelectionSet, obj *identity.Interface) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, identityImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("Identity")
+ case "id":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Identity_id(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "humanId":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Identity_humanId(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "name":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Identity_name(ctx, field, obj)
+ return res
+ })
+ case "email":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Identity_email(ctx, field, obj)
+ return res
+ })
+ case "login":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Identity_login(ctx, field, obj)
+ return res
+ })
+ case "displayName":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Identity_displayName(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "avatarUrl":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Identity_avatarUrl(ctx, field, obj)
+ return res
+ })
+ case "isProtected":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Identity_isProtected(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type IdentityEdge {
- cursor: String!
- node: Identity!
-}`},
- &ast.Source{Name: "schema/operations.graphql", Input: `"""An operation applied to a bug."""
-interface Operation {
- """The hash of the operation"""
- hash: Hash!
- """The operations author."""
- author: Identity!
- """The datetime when this operation was issued."""
- date: Time!
+var identityConnectionImplementors = []string{"IdentityConnection"}
+
+func (ec *executionContext) _IdentityConnection(ctx context.Context, sel ast.SelectionSet, obj *models.IdentityConnection) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, identityConnectionImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("IdentityConnection")
+ case "edges":
+ out.Values[i] = ec._IdentityConnection_edges(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "nodes":
+ out.Values[i] = ec._IdentityConnection_nodes(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "pageInfo":
+ out.Values[i] = ec._IdentityConnection_pageInfo(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "totalCount":
+ out.Values[i] = ec._IdentityConnection_totalCount(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-# Connection
+var identityEdgeImplementors = []string{"IdentityEdge"}
-"""The connection type for an Operation"""
-type OperationConnection {
- edges: [OperationEdge!]!
- nodes: [Operation!]!
- pageInfo: PageInfo!
- totalCount: Int!
+func (ec *executionContext) _IdentityEdge(ctx context.Context, sel ast.SelectionSet, obj *models.IdentityEdge) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, identityEdgeImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("IdentityEdge")
+ case "cursor":
+ out.Values[i] = ec._IdentityEdge_cursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "node":
+ out.Values[i] = ec._IdentityEdge_node(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""Represent an Operation"""
-type OperationEdge {
- cursor: String!
- node: Operation!
+var labelChangeOperationImplementors = []string{"LabelChangeOperation", "Operation", "Authored"}
+
+func (ec *executionContext) _LabelChangeOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.LabelChangeOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, labelChangeOperationImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("LabelChangeOperation")
+ case "hash":
+ out.Values[i] = ec._LabelChangeOperation_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._LabelChangeOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._LabelChangeOperation_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "added":
+ out.Values[i] = ec._LabelChangeOperation_added(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "removed":
+ out.Values[i] = ec._LabelChangeOperation_removed(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-# Operations
+var labelChangeTimelineItemImplementors = []string{"LabelChangeTimelineItem", "TimelineItem"}
-type CreateOperation implements Operation & Authored {
- """The hash of the operation"""
- hash: Hash!
- """The author of this object."""
- author: Identity!
- """The datetime when this operation was issued."""
- date: Time!
+func (ec *executionContext) _LabelChangeTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.LabelChangeTimelineItem) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, labelChangeTimelineItemImplementors)
- title: String!
- message: String!
- files: [Hash!]!
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("LabelChangeTimelineItem")
+ case "hash":
+ out.Values[i] = ec._LabelChangeTimelineItem_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._LabelChangeTimelineItem_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._LabelChangeTimelineItem_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "added":
+ out.Values[i] = ec._LabelChangeTimelineItem_added(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "removed":
+ out.Values[i] = ec._LabelChangeTimelineItem_removed(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type SetTitleOperation implements Operation & Authored {
- """The hash of the operation"""
- hash: Hash!
- """The author of this object."""
- author: Identity!
- """The datetime when this operation was issued."""
- date: Time!
+var mutationImplementors = []string{"Mutation"}
- title: String!
- was: String!
+func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, mutationImplementors)
+
+ ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ Object: "Mutation",
+ })
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("Mutation")
+ case "newBug":
+ out.Values[i] = ec._Mutation_newBug(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "addComment":
+ out.Values[i] = ec._Mutation_addComment(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "changeLabels":
+ out.Values[i] = ec._Mutation_changeLabels(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "open":
+ out.Values[i] = ec._Mutation_open(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "close":
+ out.Values[i] = ec._Mutation_close(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "setTitle":
+ out.Values[i] = ec._Mutation_setTitle(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "commit":
+ out.Values[i] = ec._Mutation_commit(ctx, field)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type AddCommentOperation implements Operation & Authored {
- """The hash of the operation"""
- hash: Hash!
- """The author of this object."""
- author: Identity!
- """The datetime when this operation was issued."""
- date: Time!
+var operationConnectionImplementors = []string{"OperationConnection"}
- message: String!
- files: [Hash!]!
+func (ec *executionContext) _OperationConnection(ctx context.Context, sel ast.SelectionSet, obj *models.OperationConnection) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, operationConnectionImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("OperationConnection")
+ case "edges":
+ out.Values[i] = ec._OperationConnection_edges(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "nodes":
+ out.Values[i] = ec._OperationConnection_nodes(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "pageInfo":
+ out.Values[i] = ec._OperationConnection_pageInfo(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "totalCount":
+ out.Values[i] = ec._OperationConnection_totalCount(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type EditCommentOperation implements Operation & Authored {
- """The hash of the operation"""
- hash: Hash!
- """The author of this object."""
- author: Identity!
- """The datetime when this operation was issued."""
- date: Time!
+var operationEdgeImplementors = []string{"OperationEdge"}
- target: Hash!
- message: String!
- files: [Hash!]!
+func (ec *executionContext) _OperationEdge(ctx context.Context, sel ast.SelectionSet, obj *models.OperationEdge) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, operationEdgeImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("OperationEdge")
+ case "cursor":
+ out.Values[i] = ec._OperationEdge_cursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "node":
+ out.Values[i] = ec._OperationEdge_node(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type SetStatusOperation implements Operation & Authored {
- """The hash of the operation"""
- hash: Hash!
- """The author of this object."""
- author: Identity!
- """The datetime when this operation was issued."""
- date: Time!
+var pageInfoImplementors = []string{"PageInfo"}
- status: Status!
+func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, obj *models.PageInfo) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, pageInfoImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("PageInfo")
+ case "hasNextPage":
+ out.Values[i] = ec._PageInfo_hasNextPage(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "hasPreviousPage":
+ out.Values[i] = ec._PageInfo_hasPreviousPage(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "startCursor":
+ out.Values[i] = ec._PageInfo_startCursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "endCursor":
+ out.Values[i] = ec._PageInfo_endCursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type LabelChangeOperation implements Operation & Authored {
- """The hash of the operation"""
- hash: Hash!
- """The author of this object."""
- author: Identity!
- """The datetime when this operation was issued."""
- date: Time!
+var queryImplementors = []string{"Query"}
- added: [Label!]!
- removed: [Label!]!
+func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, queryImplementors)
+
+ ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ Object: "Query",
+ })
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("Query")
+ case "defaultRepository":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Query_defaultRepository(ctx, field)
+ return res
+ })
+ case "repository":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Query_repository(ctx, field)
+ return res
+ })
+ case "__type":
+ out.Values[i] = ec._Query___type(ctx, field)
+ case "__schema":
+ out.Values[i] = ec._Query___schema(ctx, field)
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
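// _Query and _Mutation are the root executors behind the ExecutableSchema that
// this same generated file exposes. A rough sketch of how it would typically be
// served over HTTP, assuming this vintage of gqlgen's handler package; the
// resolvers.NewRootResolver constructor is an assumption and does not appear in
// this diff:
package main

import (
	"log"
	"net/http"

	"github.com/99designs/gqlgen/handler"

	"github.com/MichaelMure/git-bug/graphql/graph"
	"github.com/MichaelMure/git-bug/graphql/resolvers"
)

func main() {
	// Config.Resolvers wires the hand-written resolvers into the generated executor.
	schema := graph.NewExecutableSchema(graph.Config{
		Resolvers: resolvers.NewRootResolver(), // assumed constructor name
	})

	http.Handle("/playground", handler.Playground("git-bug", "/graphql"))
	http.Handle("/graphql", handler.GraphQL(schema))
	log.Fatal(http.ListenAndServe("127.0.0.1:3000", nil))
}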
-`},
- &ast.Source{Name: "schema/repository.graphql", Input: `
-type Repository {
- """All the bugs"""
- allBugs(
- """Returns the elements in the list that come after the specified cursor."""
- after: String
- """Returns the elements in the list that come before the specified cursor."""
- before: String
- """Returns the first _n_ elements from the list."""
- first: Int
- """Returns the last _n_ elements from the list."""
- last: Int
- """A query to select and order bugs"""
- query: String
- ): BugConnection!
- bug(prefix: String!): Bug
+var repositoryImplementors = []string{"Repository"}
- """All the identities"""
- allIdentities(
- """Returns the elements in the list that come after the specified cursor."""
- after: String
- """Returns the elements in the list that come before the specified cursor."""
- before: String
- """Returns the first _n_ elements from the list."""
- first: Int
- """Returns the last _n_ elements from the list."""
- last: Int
- ): IdentityConnection!
+func (ec *executionContext) _Repository(ctx context.Context, sel ast.SelectionSet, obj *models.Repository) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, repositoryImplementors)
- identity(prefix: String!):Identity
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("Repository")
+ case "allBugs":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Repository_allBugs(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "bug":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Repository_bug(ctx, field, obj)
+ return res
+ })
+ case "allIdentities":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Repository_allIdentities(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "identity":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Repository_identity(ctx, field, obj)
+ return res
+ })
+ case "userIdentity":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._Repository_userIdentity(ctx, field, obj)
+ return res
+ })
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
+}
- """The identity created or selected by the user as its own"""
- userIdentity:Identity
-}`},
- &ast.Source{Name: "schema/root.graphql", Input: `type Query {
- defaultRepository: Repository
- repository(id: String!): Repository
+var setStatusOperationImplementors = []string{"SetStatusOperation", "Operation", "Authored"}
+
+func (ec *executionContext) _SetStatusOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.SetStatusOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, setStatusOperationImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("SetStatusOperation")
+ case "hash":
+ out.Values[i] = ec._SetStatusOperation_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._SetStatusOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._SetStatusOperation_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "status":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._SetStatusOperation_status(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-type Mutation {
- newBug(repoRef: String, title: String!, message: String!, files: [Hash!]): Bug!
+var setStatusTimelineItemImplementors = []string{"SetStatusTimelineItem", "TimelineItem"}
- addComment(repoRef: String, prefix: String!, message: String!, files: [Hash!]): Bug!
- changeLabels(repoRef: String, prefix: String!, added: [String!], removed: [String!]): Bug!
- open(repoRef: String, prefix: String!): Bug!
- close(repoRef: String, prefix: String!): Bug!
- setTitle(repoRef: String, prefix: String!, title: String!): Bug!
+func (ec *executionContext) _SetStatusTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.SetStatusTimelineItem) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, setStatusTimelineItemImplementors)
- commit(repoRef: String, prefix: String!): Bug!
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("SetStatusTimelineItem")
+ case "hash":
+ out.Values[i] = ec._SetStatusTimelineItem_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._SetStatusTimelineItem_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._SetStatusTimelineItem_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "status":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._SetStatusTimelineItem_status(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-`},
- &ast.Source{Name: "schema/timeline.graphql", Input: `"""An item in the timeline of events"""
-interface TimelineItem {
- """The hash of the source operation"""
- hash: Hash!
+
+var setTitleOperationImplementors = []string{"SetTitleOperation", "Operation", "Authored"}
+
+func (ec *executionContext) _SetTitleOperation(ctx context.Context, sel ast.SelectionSet, obj *bug.SetTitleOperation) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, setTitleOperationImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("SetTitleOperation")
+ case "hash":
+ out.Values[i] = ec._SetTitleOperation_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._SetTitleOperation_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._SetTitleOperation_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "title":
+ out.Values[i] = ec._SetTitleOperation_title(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "was":
+ out.Values[i] = ec._SetTitleOperation_was(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""CommentHistoryStep hold one version of a message in the history"""
-type CommentHistoryStep {
- message: String!
- date: Time!
+var setTitleTimelineItemImplementors = []string{"SetTitleTimelineItem", "TimelineItem"}
+
+func (ec *executionContext) _SetTitleTimelineItem(ctx context.Context, sel ast.SelectionSet, obj *bug.SetTitleTimelineItem) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, setTitleTimelineItemImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("SetTitleTimelineItem")
+ case "hash":
+ out.Values[i] = ec._SetTitleTimelineItem_hash(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "author":
+ out.Values[i] = ec._SetTitleTimelineItem_author(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "date":
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._SetTitleTimelineItem_date(ctx, field, obj)
+ if res == graphql.Null {
+ invalid = true
+ }
+ return res
+ })
+ case "title":
+ out.Values[i] = ec._SetTitleTimelineItem_title(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "was":
+ out.Values[i] = ec._SetTitleTimelineItem_was(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-# Connection
+var timelineItemConnectionImplementors = []string{"TimelineItemConnection"}
-"""The connection type for TimelineItem"""
-type TimelineItemConnection {
- edges: [TimelineItemEdge!]!
- nodes: [TimelineItem!]!
- pageInfo: PageInfo!
- totalCount: Int!
+func (ec *executionContext) _TimelineItemConnection(ctx context.Context, sel ast.SelectionSet, obj *models.TimelineItemConnection) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, timelineItemConnectionImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("TimelineItemConnection")
+ case "edges":
+ out.Values[i] = ec._TimelineItemConnection_edges(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "nodes":
+ out.Values[i] = ec._TimelineItemConnection_nodes(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "pageInfo":
+ out.Values[i] = ec._TimelineItemConnection_pageInfo(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "totalCount":
+ out.Values[i] = ec._TimelineItemConnection_totalCount(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""Represent a TimelineItem"""
-type TimelineItemEdge {
- cursor: String!
- node: TimelineItem!
+var timelineItemEdgeImplementors = []string{"TimelineItemEdge"}
+
+func (ec *executionContext) _TimelineItemEdge(ctx context.Context, sel ast.SelectionSet, obj *models.TimelineItemEdge) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, timelineItemEdgeImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("TimelineItemEdge")
+ case "cursor":
+ out.Values[i] = ec._TimelineItemEdge_cursor(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "node":
+ out.Values[i] = ec._TimelineItemEdge_node(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-# Items
+var __DirectiveImplementors = []string{"__Directive"}
-"""CreateTimelineItem is a TimelineItem that represent the creation of a bug and its message edition history"""
-type CreateTimelineItem implements TimelineItem {
- """The hash of the source operation"""
- hash: Hash!
- author: Identity!
- message: String!
- messageIsEmpty: Boolean!
- files: [Hash!]!
- createdAt: Time!
- lastEdit: Time!
- edited: Boolean!
- history: [CommentHistoryStep!]!
+func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __DirectiveImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("__Directive")
+ case "name":
+ out.Values[i] = ec.___Directive_name(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "description":
+ out.Values[i] = ec.___Directive_description(ctx, field, obj)
+ case "locations":
+ out.Values[i] = ec.___Directive_locations(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "args":
+ out.Values[i] = ec.___Directive_args(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""AddCommentTimelineItem is a TimelineItem that represent a Comment and its edition history"""
-type AddCommentTimelineItem implements TimelineItem {
- """The hash of the source operation"""
- hash: Hash!
- author: Identity!
- message: String!
- messageIsEmpty: Boolean!
- files: [Hash!]!
- createdAt: Time!
- lastEdit: Time!
- edited: Boolean!
- history: [CommentHistoryStep!]!
+var __EnumValueImplementors = []string{"__EnumValue"}
+
+func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __EnumValueImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("__EnumValue")
+ case "name":
+ out.Values[i] = ec.___EnumValue_name(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "description":
+ out.Values[i] = ec.___EnumValue_description(ctx, field, obj)
+ case "isDeprecated":
+ out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "deprecationReason":
+ out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj)
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""LabelChangeTimelineItem is a TimelineItem that represent a change in the labels of a bug"""
-type LabelChangeTimelineItem implements TimelineItem {
- """The hash of the source operation"""
- hash: Hash!
- author: Identity!
- date: Time!
- added: [Label!]!
- removed: [Label!]!
+var __FieldImplementors = []string{"__Field"}
+
+func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __FieldImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("__Field")
+ case "name":
+ out.Values[i] = ec.___Field_name(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "description":
+ out.Values[i] = ec.___Field_description(ctx, field, obj)
+ case "args":
+ out.Values[i] = ec.___Field_args(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "type":
+ out.Values[i] = ec.___Field_type(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "isDeprecated":
+ out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "deprecationReason":
+ out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj)
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""SetStatusTimelineItem is a TimelineItem that represent a change in the status of a bug"""
-type SetStatusTimelineItem implements TimelineItem {
- """The hash of the source operation"""
- hash: Hash!
- author: Identity!
- date: Time!
- status: Status!
+var __InputValueImplementors = []string{"__InputValue"}
+
+func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __InputValueImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("__InputValue")
+ case "name":
+ out.Values[i] = ec.___InputValue_name(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "description":
+ out.Values[i] = ec.___InputValue_description(ctx, field, obj)
+ case "type":
+ out.Values[i] = ec.___InputValue_type(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "defaultValue":
+ out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj)
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""LabelChangeTimelineItem is a TimelineItem that represent a change in the title of a bug"""
-type SetTitleTimelineItem implements TimelineItem {
- """The hash of the source operation"""
- hash: Hash!
- author: Identity!
- date: Time!
- title: String!
- was: String!
+var __SchemaImplementors = []string{"__Schema"}
+
+func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __SchemaImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("__Schema")
+ case "types":
+ out.Values[i] = ec.___Schema_types(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "queryType":
+ out.Values[i] = ec.___Schema_queryType(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "mutationType":
+ out.Values[i] = ec.___Schema_mutationType(ctx, field, obj)
+ case "subscriptionType":
+ out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj)
+ case "directives":
+ out.Values[i] = ec.___Schema_directives(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-`},
- &ast.Source{Name: "schema/types.graphql", Input: `scalar Time
-scalar Label
-scalar Hash
-"""Information about pagination in a connection."""
-type PageInfo {
- """When paginating forwards, are there more items?"""
- hasNextPage: Boolean!
- """When paginating backwards, are there more items?"""
- hasPreviousPage: Boolean!
- """When paginating backwards, the cursor to continue."""
- startCursor: String!
- """When paginating forwards, the cursor to continue."""
- endCursor: String!
+var __TypeImplementors = []string{"__Type"}
+
+func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, __TypeImplementors)
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString("__Type")
+ case "kind":
+ out.Values[i] = ec.___Type_kind(ctx, field, obj)
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ case "name":
+ out.Values[i] = ec.___Type_name(ctx, field, obj)
+ case "description":
+ out.Values[i] = ec.___Type_description(ctx, field, obj)
+ case "fields":
+ out.Values[i] = ec.___Type_fields(ctx, field, obj)
+ case "interfaces":
+ out.Values[i] = ec.___Type_interfaces(ctx, field, obj)
+ case "possibleTypes":
+ out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj)
+ case "enumValues":
+ out.Values[i] = ec.___Type_enumValues(ctx, field, obj)
+ case "inputFields":
+ out.Values[i] = ec.___Type_inputFields(ctx, field, obj)
+ case "ofType":
+ out.Values[i] = ec.___Type_ofType(ctx, field, obj)
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid {
+ return graphql.Null
+ }
+ return out
}
-"""An object that has an author."""
-interface Authored {
- """The author of this object."""
- author: Identity!
-}`},
-)
+// endregion **************************** object.gotpl ****************************
+
+// region ***************************** type.gotpl *****************************
+
+func (ec *executionContext) unmarshalNBoolean2bool(ctx context.Context, v interface{}) (bool, error) {
+ return graphql.UnmarshalBoolean(v)
+}
+
+func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler {
+ return graphql.MarshalBoolean(v)
+}
+
+func (ec *executionContext) marshalNBug2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx context.Context, sel ast.SelectionSet, v bug.Snapshot) graphql.Marshaler {
+ return ec._Bug(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNBug2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx context.Context, sel ast.SelectionSet, v []bug.Snapshot) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNBug2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalNBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx context.Context, sel ast.SelectionSet, v *bug.Snapshot) graphql.Marshaler {
+ if v == nil {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return ec._Bug(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalNBugConnection2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐBugConnection(ctx context.Context, sel ast.SelectionSet, v models.BugConnection) graphql.Marshaler {
+ return ec._BugConnection(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNBugConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐBugConnection(ctx context.Context, sel ast.SelectionSet, v *models.BugConnection) graphql.Marshaler {
+ if v == nil {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return ec._BugConnection(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalNBugEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐBugEdge(ctx context.Context, sel ast.SelectionSet, v models.BugEdge) graphql.Marshaler {
+ return ec._BugEdge(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNBugEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐBugEdge(ctx context.Context, sel ast.SelectionSet, v []models.BugEdge) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNBugEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐBugEdge(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalNComment2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐComment(ctx context.Context, sel ast.SelectionSet, v bug.Comment) graphql.Marshaler {
+ return ec._Comment(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNComment2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐComment(ctx context.Context, sel ast.SelectionSet, v []bug.Comment) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNComment2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐComment(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalNCommentConnection2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐCommentConnection(ctx context.Context, sel ast.SelectionSet, v models.CommentConnection) graphql.Marshaler {
+ return ec._CommentConnection(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNCommentConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐCommentConnection(ctx context.Context, sel ast.SelectionSet, v *models.CommentConnection) graphql.Marshaler {
+ if v == nil {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return ec._CommentConnection(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalNCommentEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐCommentEdge(ctx context.Context, sel ast.SelectionSet, v models.CommentEdge) graphql.Marshaler {
+ return ec._CommentEdge(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNCommentEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐCommentEdge(ctx context.Context, sel ast.SelectionSet, v []models.CommentEdge) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNCommentEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐCommentEdge(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalNCommentHistoryStep2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐCommentHistoryStep(ctx context.Context, sel ast.SelectionSet, v bug.CommentHistoryStep) graphql.Marshaler {
+ return ec._CommentHistoryStep(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNCommentHistoryStep2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐCommentHistoryStep(ctx context.Context, sel ast.SelectionSet, v []bug.CommentHistoryStep) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNCommentHistoryStep2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐCommentHistoryStep(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) unmarshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx context.Context, v interface{}) (git.Hash, error) {
+ var res git.Hash
+ return res, res.UnmarshalGQL(v)
+}
+
+func (ec *executionContext) marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx context.Context, sel ast.SelectionSet, v git.Hash) graphql.Marshaler {
+ return v
+}
+
+func (ec *executionContext) unmarshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx context.Context, v interface{}) ([]git.Hash, error) {
+ var vSlice []interface{}
+ if v != nil {
+ if tmp1, ok := v.([]interface{}); ok {
+ vSlice = tmp1
+ } else {
+ vSlice = []interface{}{v}
+ }
+ }
+ var err error
+ res := make([]git.Hash, len(vSlice))
+ for i := range vSlice {
+ res[i], err = ec.unmarshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, vSlice[i])
+ if err != nil {
+ return nil, err
+ }
+ }
+ return res, nil
+}
+
+func (ec *executionContext) marshalNHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx context.Context, sel ast.SelectionSet, v []git.Hash) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ for i := range v {
+ ret[i] = ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, sel, v[i])
+ }
+
+ return ret
+}
+
+func (ec *executionContext) marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx context.Context, sel ast.SelectionSet, v identity.Interface) graphql.Marshaler {
+ return ec._Identity(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNIdentity2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx context.Context, sel ast.SelectionSet, v []identity.Interface) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalNIdentityConnection2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityConnection(ctx context.Context, sel ast.SelectionSet, v models.IdentityConnection) graphql.Marshaler {
+ return ec._IdentityConnection(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNIdentityConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityConnection(ctx context.Context, sel ast.SelectionSet, v *models.IdentityConnection) graphql.Marshaler {
+ if v == nil {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return ec._IdentityConnection(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalNIdentityEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityEdge(ctx context.Context, sel ast.SelectionSet, v models.IdentityEdge) graphql.Marshaler {
+ return ec._IdentityEdge(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNIdentityEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityEdge(ctx context.Context, sel ast.SelectionSet, v []models.IdentityEdge) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNIdentityEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐIdentityEdge(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) unmarshalNInt2int(ctx context.Context, v interface{}) (int, error) {
+ return graphql.UnmarshalInt(v)
+}
+
+func (ec *executionContext) marshalNInt2int(ctx context.Context, sel ast.SelectionSet, v int) graphql.Marshaler {
+ return graphql.MarshalInt(v)
+}
+
+func (ec *executionContext) unmarshalNLabel2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx context.Context, v interface{}) (bug.Label, error) {
+ var res bug.Label
+ return res, res.UnmarshalGQL(v)
+}
+
+func (ec *executionContext) marshalNLabel2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx context.Context, sel ast.SelectionSet, v bug.Label) graphql.Marshaler {
+ return v
+}
+
+func (ec *executionContext) unmarshalNLabel2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx context.Context, v interface{}) ([]bug.Label, error) {
+ var vSlice []interface{}
+ if v != nil {
+ if tmp1, ok := v.([]interface{}); ok {
+ vSlice = tmp1
+ } else {
+ vSlice = []interface{}{v}
+ }
+ }
+ var err error
+ res := make([]bug.Label, len(vSlice))
+ for i := range vSlice {
+ res[i], err = ec.unmarshalNLabel2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx, vSlice[i])
+ if err != nil {
+ return nil, err
+ }
+ }
+ return res, nil
+}
+
+func (ec *executionContext) marshalNLabel2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx context.Context, sel ast.SelectionSet, v []bug.Label) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ for i := range v {
+ ret[i] = ec.marshalNLabel2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐLabel(ctx, sel, v[i])
+ }
+
+ return ret
+}
+
+func (ec *executionContext) marshalNOperation2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐOperation(ctx context.Context, sel ast.SelectionSet, v bug.Operation) graphql.Marshaler {
+ return ec._Operation(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNOperation2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐOperation(ctx context.Context, sel ast.SelectionSet, v []bug.Operation) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNOperation2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐOperation(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalNOperationConnection2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐOperationConnection(ctx context.Context, sel ast.SelectionSet, v models.OperationConnection) graphql.Marshaler {
+ return ec._OperationConnection(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNOperationConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐOperationConnection(ctx context.Context, sel ast.SelectionSet, v *models.OperationConnection) graphql.Marshaler {
+ if v == nil {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return ec._OperationConnection(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalNOperationEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐOperationEdge(ctx context.Context, sel ast.SelectionSet, v models.OperationEdge) graphql.Marshaler {
+ return ec._OperationEdge(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNOperationEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐOperationEdge(ctx context.Context, sel ast.SelectionSet, v []models.OperationEdge) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNOperationEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐOperationEdge(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalNPageInfo2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐPageInfo(ctx context.Context, sel ast.SelectionSet, v models.PageInfo) graphql.Marshaler {
+ return ec._PageInfo(ctx, sel, &v)
+}
+
+func (ec *executionContext) unmarshalNStatus2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐStatus(ctx context.Context, v interface{}) (models.Status, error) {
+ var res models.Status
+ return res, res.UnmarshalGQL(v)
+}
+
+func (ec *executionContext) marshalNStatus2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐStatus(ctx context.Context, sel ast.SelectionSet, v models.Status) graphql.Marshaler {
+ return v
+}
+
+func (ec *executionContext) unmarshalNString2string(ctx context.Context, v interface{}) (string, error) {
+ return graphql.UnmarshalString(v)
+}
+
+func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
+ return graphql.MarshalString(v)
+}
+
+func (ec *executionContext) unmarshalNTime2timeᚐTime(ctx context.Context, v interface{}) (time.Time, error) {
+ return graphql.UnmarshalTime(v)
+}
+
+func (ec *executionContext) marshalNTime2timeᚐTime(ctx context.Context, sel ast.SelectionSet, v time.Time) graphql.Marshaler {
+ if v.IsZero() {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return graphql.MarshalTime(v)
+}
+
+func (ec *executionContext) unmarshalNTime2ᚖtimeᚐTime(ctx context.Context, v interface{}) (*time.Time, error) {
+ if v == nil {
+ return nil, nil
+ }
+ res, err := ec.unmarshalNTime2timeᚐTime(ctx, v)
+ return &res, err
+}
+
+func (ec *executionContext) marshalNTime2ᚖtimeᚐTime(ctx context.Context, sel ast.SelectionSet, v *time.Time) graphql.Marshaler {
+ if v == nil {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return ec.marshalNTime2timeᚐTime(ctx, sel, *v)
+}
+
+func (ec *executionContext) marshalNTimelineItem2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐTimelineItem(ctx context.Context, sel ast.SelectionSet, v bug.TimelineItem) graphql.Marshaler {
+ return ec._TimelineItem(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNTimelineItem2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐTimelineItem(ctx context.Context, sel ast.SelectionSet, v []bug.TimelineItem) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNTimelineItem2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐTimelineItem(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalNTimelineItemConnection2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐTimelineItemConnection(ctx context.Context, sel ast.SelectionSet, v models.TimelineItemConnection) graphql.Marshaler {
+ return ec._TimelineItemConnection(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNTimelineItemConnection2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐTimelineItemConnection(ctx context.Context, sel ast.SelectionSet, v *models.TimelineItemConnection) graphql.Marshaler {
+ if v == nil {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return ec._TimelineItemConnection(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalNTimelineItemEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐTimelineItemEdge(ctx context.Context, sel ast.SelectionSet, v models.TimelineItemEdge) graphql.Marshaler {
+ return ec._TimelineItemEdge(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalNTimelineItemEdge2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐTimelineItemEdge(ctx context.Context, sel ast.SelectionSet, v []models.TimelineItemEdge) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalNTimelineItemEdge2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐTimelineItemEdge(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler {
+ return ec.___Directive(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalN__Directive2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v []introspection.Directive) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) unmarshalN__DirectiveLocation2string(ctx context.Context, v interface{}) (string, error) {
+ return graphql.UnmarshalString(v)
+}
+
+func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
+ return graphql.MarshalString(v)
+}
+
+func (ec *executionContext) unmarshalN__DirectiveLocation2ᚕstring(ctx context.Context, v interface{}) ([]string, error) {
+ var vSlice []interface{}
+ if v != nil {
+ if tmp1, ok := v.([]interface{}); ok {
+ vSlice = tmp1
+ } else {
+ vSlice = []interface{}{v}
+ }
+ }
+ var err error
+ res := make([]string, len(vSlice))
+ for i := range vSlice {
+ res[i], err = ec.unmarshalN__DirectiveLocation2string(ctx, vSlice[i])
+ if err != nil {
+ return nil, err
+ }
+ }
+ return res, nil
+}
+
+func (ec *executionContext) marshalN__DirectiveLocation2ᚕstring(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalN__DirectiveLocation2string(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalN__EnumValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx context.Context, sel ast.SelectionSet, v introspection.EnumValue) graphql.Marshaler {
+ return ec.___EnumValue(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalN__Field2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx context.Context, sel ast.SelectionSet, v introspection.Field) graphql.Marshaler {
+ return ec.___Field(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx context.Context, sel ast.SelectionSet, v introspection.InputValue) graphql.Marshaler {
+ return ec.___InputValue(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v introspection.Type) graphql.Marshaler {
+ return ec.___Type(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalN__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler {
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler {
+ if v == nil {
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ return graphql.Null
+ }
+ return ec.___Type(ctx, sel, v)
+}
+
+func (ec *executionContext) unmarshalN__TypeKind2string(ctx context.Context, v interface{}) (string, error) {
+ return graphql.UnmarshalString(v)
+}
+
+func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
+ return graphql.MarshalString(v)
+}
+
+func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v interface{}) (bool, error) {
+ return graphql.UnmarshalBoolean(v)
+}
+
+func (ec *executionContext) marshalOBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler {
+ return graphql.MarshalBoolean(v)
+}
+
+func (ec *executionContext) unmarshalOBoolean2ᚖbool(ctx context.Context, v interface{}) (*bool, error) {
+ if v == nil {
+ return nil, nil
+ }
+ res, err := ec.unmarshalOBoolean2bool(ctx, v)
+ return &res, err
+}
+
+func (ec *executionContext) marshalOBoolean2ᚖbool(ctx context.Context, sel ast.SelectionSet, v *bool) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ return ec.marshalOBoolean2bool(ctx, sel, *v)
+}
+
+func (ec *executionContext) marshalOBug2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx context.Context, sel ast.SelectionSet, v bug.Snapshot) graphql.Marshaler {
+ return ec._Bug(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalOBug2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋbugᚐSnapshot(ctx context.Context, sel ast.SelectionSet, v *bug.Snapshot) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ return ec._Bug(ctx, sel, v)
+}
+
+func (ec *executionContext) unmarshalOHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx context.Context, v interface{}) ([]git.Hash, error) {
+ var vSlice []interface{}
+ if v != nil {
+ if tmp1, ok := v.([]interface{}); ok {
+ vSlice = tmp1
+ } else {
+ vSlice = []interface{}{v}
+ }
+ }
+ var err error
+ res := make([]git.Hash, len(vSlice))
+ for i := range vSlice {
+ res[i], err = ec.unmarshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, vSlice[i])
+ if err != nil {
+ return nil, err
+ }
+ }
+ return res, nil
+}
+
+func (ec *executionContext) marshalOHash2ᚕgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx context.Context, sel ast.SelectionSet, v []git.Hash) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ ret := make(graphql.Array, len(v))
+ for i := range v {
+ ret[i] = ec.marshalNHash2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋutilᚋgitᚐHash(ctx, sel, v[i])
+ }
+
+ return ret
+}
+
+func (ec *executionContext) marshalOIdentity2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋidentityᚐInterface(ctx context.Context, sel ast.SelectionSet, v identity.Interface) graphql.Marshaler {
+ return ec._Identity(ctx, sel, &v)
+}
+
+func (ec *executionContext) unmarshalOInt2int(ctx context.Context, v interface{}) (int, error) {
+ return graphql.UnmarshalInt(v)
+}
+
+func (ec *executionContext) marshalOInt2int(ctx context.Context, sel ast.SelectionSet, v int) graphql.Marshaler {
+ return graphql.MarshalInt(v)
+}
+
+func (ec *executionContext) unmarshalOInt2ᚖint(ctx context.Context, v interface{}) (*int, error) {
+ if v == nil {
+ return nil, nil
+ }
+ res, err := ec.unmarshalOInt2int(ctx, v)
+ return &res, err
+}
+
+func (ec *executionContext) marshalOInt2ᚖint(ctx context.Context, sel ast.SelectionSet, v *int) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ return ec.marshalOInt2int(ctx, sel, *v)
+}
+
+func (ec *executionContext) marshalORepository2githubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐRepository(ctx context.Context, sel ast.SelectionSet, v models.Repository) graphql.Marshaler {
+ return ec._Repository(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalORepository2ᚖgithubᚗcomᚋMichaelMureᚋgitᚑbugᚋgraphqlᚋmodelsᚐRepository(ctx context.Context, sel ast.SelectionSet, v *models.Repository) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ return ec._Repository(ctx, sel, v)
+}
+
+func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) {
+ return graphql.UnmarshalString(v)
+}
+
+func (ec *executionContext) marshalOString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler {
+ return graphql.MarshalString(v)
+}
+
+func (ec *executionContext) unmarshalOString2ᚕstring(ctx context.Context, v interface{}) ([]string, error) {
+ var vSlice []interface{}
+ if v != nil {
+ if tmp1, ok := v.([]interface{}); ok {
+ vSlice = tmp1
+ } else {
+ vSlice = []interface{}{v}
+ }
+ }
+ var err error
+ res := make([]string, len(vSlice))
+ for i := range vSlice {
+ res[i], err = ec.unmarshalNString2string(ctx, vSlice[i])
+ if err != nil {
+ return nil, err
+ }
+ }
+ return res, nil
+}
+
+func (ec *executionContext) marshalOString2ᚕstring(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ ret := make(graphql.Array, len(v))
+ for i := range v {
+ ret[i] = ec.marshalNString2string(ctx, sel, v[i])
+ }
+
+ return ret
+}
+
+func (ec *executionContext) unmarshalOString2ᚖstring(ctx context.Context, v interface{}) (*string, error) {
+ if v == nil {
+ return nil, nil
+ }
+ res, err := ec.unmarshalOString2string(ctx, v)
+ return &res, err
+}
+
+func (ec *executionContext) marshalOString2ᚖstring(ctx context.Context, sel ast.SelectionSet, v *string) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ return ec.marshalOString2string(ctx, sel, *v)
+}
+
+func (ec *executionContext) marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalN__EnumValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx context.Context, sel ast.SelectionSet, v []introspection.Field) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalN__Field2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalO__Schema2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx context.Context, sel ast.SelectionSet, v introspection.Schema) graphql.Marshaler {
+ return ec.___Schema(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx context.Context, sel ast.SelectionSet, v *introspection.Schema) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ return ec.___Schema(ctx, sel, v)
+}
+
+func (ec *executionContext) marshalO__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v introspection.Type) graphql.Marshaler {
+ return ec.___Type(ctx, sel, &v)
+}
+
+func (ec *executionContext) marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ ret := make(graphql.Array, len(v))
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ for i := range v {
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+
+ }
+ wg.Wait()
+ return ret
+}
+
+func (ec *executionContext) marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler {
+ if v == nil {
+ return graphql.Null
+ }
+ return ec.___Type(ctx, sel, v)
+}
+
+// endregion ***************************** type.gotpl *****************************
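The generated helpers in the region above follow one pattern throughout: marshalN…/unmarshalN… enforce non-null semantics (raise an error and return graphql.Null on nil), marshalO… simply returns graphql.Null, and every slice marshaler fans its elements out on goroutines joined by a sync.WaitGroup with a per-element recover. The following is a minimal standalone sketch of that fan-out only; Item and marshalItem are hypothetical stand-ins, not gqlgen or git-bug names.

package main

import (
	"fmt"
	"sync"
)

// Item and marshalItem are hypothetical stand-ins for a concrete type and its
// generated single-value marshaler.
type Item struct{ Name string }

func marshalItem(v Item) string { return v.Name }

// marshalItemSlice mirrors the shape of the generated slice marshalers: each
// element is marshaled on its own goroutine (unless the slice has exactly one
// element), a WaitGroup joins the results, and a recover keeps one panicking
// element from taking down the whole response.
func marshalItemSlice(v []Item) []string {
	ret := make([]string, len(v))
	var wg sync.WaitGroup
	isLen1 := len(v) == 1
	if !isLen1 {
		wg.Add(len(v))
	}
	for i := range v {
		f := func(i int) {
			defer func() {
				if r := recover(); r != nil {
					ret = nil // on panic, drop the whole array, as the generated code does
				}
			}()
			if !isLen1 {
				defer wg.Done()
			}
			ret[i] = marshalItem(v[i])
		}
		if isLen1 {
			f(i) // single element: no goroutine needed
		} else {
			go f(i)
		}
	}
	wg.Wait()
	return ret
}

func main() {
	fmt.Println(marshalItemSlice([]Item{{"a"}, {"b"}, {"c"}})) // [a b c]
}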
diff --git a/graphql/models/gen_models.go b/graphql/models/gen_models.go
index 172fe033..64997104 100644
--- a/graphql/models/gen_models.go
+++ b/graphql/models/gen_models.go
@@ -18,16 +18,21 @@ type Authored interface {
// The connection type for Bug.
type BugConnection struct {
- Edges []BugEdge `json:"edges"`
- Nodes []bug.Snapshot `json:"nodes"`
- PageInfo PageInfo `json:"pageInfo"`
- TotalCount int `json:"totalCount"`
+ // A list of edges.
+ Edges []BugEdge `json:"edges"`
+ Nodes []bug.Snapshot `json:"nodes"`
+ // Information to aid in pagination.
+ PageInfo PageInfo `json:"pageInfo"`
+ // Identifies the total count of items in the connection.
+ TotalCount int `json:"totalCount"`
}
// An edge in a connection.
type BugEdge struct {
- Cursor string `json:"cursor"`
- Node bug.Snapshot `json:"node"`
+ // A cursor for use in pagination.
+ Cursor string `json:"cursor"`
+ // The item at the end of the edge.
+ Node bug.Snapshot `json:"node"`
}
type CommentConnection struct {
@@ -70,10 +75,14 @@ type OperationEdge struct {
// Information about pagination in a connection.
type PageInfo struct {
- HasNextPage bool `json:"hasNextPage"`
- HasPreviousPage bool `json:"hasPreviousPage"`
- StartCursor string `json:"startCursor"`
- EndCursor string `json:"endCursor"`
+ // When paginating forwards, are there more items?
+ HasNextPage bool `json:"hasNextPage"`
+ // When paginating backwards, are there more items?
+ HasPreviousPage bool `json:"hasPreviousPage"`
+ // When paginating backwards, the cursor to continue.
+ StartCursor string `json:"startCursor"`
+ // When paginating forwards, the cursor to continue.
+ EndCursor string `json:"endCursor"`
}
// The connection type for TimelineItem
@@ -97,6 +106,11 @@ const (
StatusClosed Status = "CLOSED"
)
+var AllStatus = []Status{
+ StatusOpen,
+ StatusClosed,
+}
+
func (e Status) IsValid() bool {
switch e {
case StatusOpen, StatusClosed:
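The AllStatus slice added above is the usual companion to a gqlgen string enum: it lists every valid value so callers can iterate or validate without duplicating the switch inside IsValid. A small self-contained sketch of the same shape, using a hypothetical Color enum rather than the generated Status type:

package main

import "fmt"

// Color is a hypothetical string-backed enum following the same shape as the
// generated Status type: typed constants, an All… slice, and an IsValid check.
type Color string

const (
	ColorRed  Color = "RED"
	ColorBlue Color = "BLUE"
)

var AllColor = []Color{ColorRed, ColorBlue}

// IsValid reports whether the value is one of the declared constants.
func (e Color) IsValid() bool {
	for _, v := range AllColor {
		if e == v {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(Color("RED").IsValid())   // true
	fmt.Println(Color("GREEN").IsValid()) // false
}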
diff --git a/graphql/resolvers/bug.go b/graphql/resolvers/bug.go
index ef35853c..2ad2310b 100644
--- a/graphql/resolvers/bug.go
+++ b/graphql/resolvers/bug.go
@@ -19,7 +19,7 @@ func (bugResolver) Status(ctx context.Context, obj *bug.Snapshot) (models.Status
return convertStatus(obj.Status)
}
-func (bugResolver) Comments(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.CommentConnection, error) {
+func (bugResolver) Comments(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.CommentConnection, error) {
input := models.ConnectionInput{
Before: before,
After: after,
@@ -34,8 +34,8 @@ func (bugResolver) Comments(ctx context.Context, obj *bug.Snapshot, after *strin
}
}
- conMaker := func(edges []models.CommentEdge, nodes []bug.Comment, info models.PageInfo, totalCount int) (models.CommentConnection, error) {
- return models.CommentConnection{
+ conMaker := func(edges []models.CommentEdge, nodes []bug.Comment, info models.PageInfo, totalCount int) (*models.CommentConnection, error) {
+ return &models.CommentConnection{
Edges: edges,
Nodes: nodes,
PageInfo: info,
@@ -46,7 +46,7 @@ func (bugResolver) Comments(ctx context.Context, obj *bug.Snapshot, after *strin
return connections.CommentCon(obj.Comments, edger, conMaker, input)
}
-func (bugResolver) Operations(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.OperationConnection, error) {
+func (bugResolver) Operations(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.OperationConnection, error) {
input := models.ConnectionInput{
Before: before,
After: after,
@@ -61,8 +61,8 @@ func (bugResolver) Operations(ctx context.Context, obj *bug.Snapshot, after *str
}
}
- conMaker := func(edges []models.OperationEdge, nodes []bug.Operation, info models.PageInfo, totalCount int) (models.OperationConnection, error) {
- return models.OperationConnection{
+ conMaker := func(edges []models.OperationEdge, nodes []bug.Operation, info models.PageInfo, totalCount int) (*models.OperationConnection, error) {
+ return &models.OperationConnection{
Edges: edges,
Nodes: nodes,
PageInfo: info,
@@ -73,7 +73,7 @@ func (bugResolver) Operations(ctx context.Context, obj *bug.Snapshot, after *str
return connections.OperationCon(obj.Operations, edger, conMaker, input)
}
-func (bugResolver) Timeline(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.TimelineItemConnection, error) {
+func (bugResolver) Timeline(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.TimelineItemConnection, error) {
input := models.ConnectionInput{
Before: before,
After: after,
@@ -88,8 +88,8 @@ func (bugResolver) Timeline(ctx context.Context, obj *bug.Snapshot, after *strin
}
}
- conMaker := func(edges []models.TimelineItemEdge, nodes []bug.TimelineItem, info models.PageInfo, totalCount int) (models.TimelineItemConnection, error) {
- return models.TimelineItemConnection{
+ conMaker := func(edges []models.TimelineItemEdge, nodes []bug.TimelineItem, info models.PageInfo, totalCount int) (*models.TimelineItemConnection, error) {
+ return &models.TimelineItemConnection{
Edges: edges,
Nodes: nodes,
PageInfo: info,
@@ -100,11 +100,12 @@ func (bugResolver) Timeline(ctx context.Context, obj *bug.Snapshot, after *strin
return connections.TimelineItemCon(obj.Timeline, edger, conMaker, input)
}
-func (bugResolver) LastEdit(ctx context.Context, obj *bug.Snapshot) (time.Time, error) {
- return obj.LastEditTime(), nil
+func (bugResolver) LastEdit(ctx context.Context, obj *bug.Snapshot) (*time.Time, error) {
+ t := obj.LastEditTime()
+ return &t, nil
}
-func (bugResolver) Actors(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.IdentityConnection, error) {
+func (bugResolver) Actors(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.IdentityConnection, error) {
input := models.ConnectionInput{
Before: before,
After: after,
@@ -119,8 +120,8 @@ func (bugResolver) Actors(ctx context.Context, obj *bug.Snapshot, after *string,
}
}
- conMaker := func(edges []models.IdentityEdge, nodes []identity.Interface, info models.PageInfo, totalCount int) (models.IdentityConnection, error) {
- return models.IdentityConnection{
+ conMaker := func(edges []models.IdentityEdge, nodes []identity.Interface, info models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
+ return &models.IdentityConnection{
Edges: edges,
Nodes: nodes,
PageInfo: info,
@@ -131,7 +132,7 @@ func (bugResolver) Actors(ctx context.Context, obj *bug.Snapshot, after *string,
return connections.IdentityCon(obj.Actors, edger, conMaker, input)
}
-func (bugResolver) Participants(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (models.IdentityConnection, error) {
+func (bugResolver) Participants(ctx context.Context, obj *bug.Snapshot, after *string, before *string, first *int, last *int) (*models.IdentityConnection, error) {
input := models.ConnectionInput{
Before: before,
After: after,
@@ -146,8 +147,8 @@ func (bugResolver) Participants(ctx context.Context, obj *bug.Snapshot, after *s
}
}
- conMaker := func(edges []models.IdentityEdge, nodes []identity.Interface, info models.PageInfo, totalCount int) (models.IdentityConnection, error) {
- return models.IdentityConnection{
+ conMaker := func(edges []models.IdentityEdge, nodes []identity.Interface, info models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
+ return &models.IdentityConnection{
Edges: edges,
Nodes: nodes,
PageInfo: info,
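Each resolver in this file builds its Relay-style connection by handing two callbacks to a generated paginator: an edger that turns one node into an edge, and a conMaker that assembles the final connection (now returned by pointer so error paths can yield nil). The sketch below is a deliberately tiny, hypothetical version of that shape; paginate, Edge and Connection stand in for the generated connections.*Con helpers and models types.

package main

import "fmt"

// Edge and Connection are hypothetical miniatures of the generated edge and
// connection types used by the resolvers above.
type Edge struct {
	Cursor string
	Node   string
}

type Connection struct {
	Edges      []Edge
	TotalCount int
}

// paginate stands in for the generated connection helpers: it takes the first
// N nodes, builds edges via edger, and lets conMaker assemble the result.
func paginate(
	nodes []string,
	first int,
	edger func(node string, offset int) Edge,
	conMaker func(edges []Edge, total int) (*Connection, error),
) (*Connection, error) {
	if first > len(nodes) {
		first = len(nodes)
	}
	edges := make([]Edge, first)
	for i := 0; i < first; i++ {
		edges[i] = edger(nodes[i], i)
	}
	return conMaker(edges, len(nodes))
}

func main() {
	edger := func(node string, offset int) Edge {
		return Edge{Cursor: fmt.Sprintf("cursor:%d", offset), Node: node}
	}
	conMaker := func(edges []Edge, total int) (*Connection, error) {
		return &Connection{Edges: edges, TotalCount: total}, nil
	}
	con, _ := paginate([]string{"a", "b", "c"}, 2, edger, conMaker)
	fmt.Println(len(con.Edges), con.TotalCount) // 2 3
}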
diff --git a/graphql/resolvers/mutation.go b/graphql/resolvers/mutation.go
index 73d39da8..d10d2ea3 100644
--- a/graphql/resolvers/mutation.go
+++ b/graphql/resolvers/mutation.go
@@ -23,144 +23,130 @@ func (r mutationResolver) getRepo(repoRef *string) (*cache.RepoCache, error) {
return r.cache.DefaultRepo()
}
-func (r mutationResolver) NewBug(ctx context.Context, repoRef *string, title string, message string, files []git.Hash) (bug.Snapshot, error) {
+func (r mutationResolver) NewBug(ctx context.Context, repoRef *string, title string, message string, files []git.Hash) (*bug.Snapshot, error) {
repo, err := r.getRepo(repoRef)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
b, err := repo.NewBugWithFiles(title, message, files)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
- snap := b.Snapshot()
-
- return *snap, nil
+ return b.Snapshot(), nil
}
-func (r mutationResolver) Commit(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error) {
+func (r mutationResolver) Commit(ctx context.Context, repoRef *string, prefix string) (*bug.Snapshot, error) {
repo, err := r.getRepo(repoRef)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
b, err := repo.ResolveBugPrefix(prefix)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
err = b.Commit()
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
- snap := b.Snapshot()
-
- return *snap, nil
+ return b.Snapshot(), nil
}
-func (r mutationResolver) AddComment(ctx context.Context, repoRef *string, prefix string, message string, files []git.Hash) (bug.Snapshot, error) {
+func (r mutationResolver) AddComment(ctx context.Context, repoRef *string, prefix string, message string, files []git.Hash) (*bug.Snapshot, error) {
repo, err := r.getRepo(repoRef)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
b, err := repo.ResolveBugPrefix(prefix)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
_, err = b.AddCommentWithFiles(message, files)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
- snap := b.Snapshot()
-
- return *snap, nil
+ return b.Snapshot(), nil
}
-func (r mutationResolver) ChangeLabels(ctx context.Context, repoRef *string, prefix string, added []string, removed []string) (bug.Snapshot, error) {
+func (r mutationResolver) ChangeLabels(ctx context.Context, repoRef *string, prefix string, added []string, removed []string) (*bug.Snapshot, error) {
repo, err := r.getRepo(repoRef)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
b, err := repo.ResolveBugPrefix(prefix)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
_, _, err = b.ChangeLabels(added, removed)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
- snap := b.Snapshot()
-
- return *snap, nil
+ return b.Snapshot(), nil
}
-func (r mutationResolver) Open(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error) {
+func (r mutationResolver) Open(ctx context.Context, repoRef *string, prefix string) (*bug.Snapshot, error) {
repo, err := r.getRepo(repoRef)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
b, err := repo.ResolveBugPrefix(prefix)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
_, err = b.Open()
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
- snap := b.Snapshot()
-
- return *snap, nil
+ return b.Snapshot(), nil
}
-func (r mutationResolver) Close(ctx context.Context, repoRef *string, prefix string) (bug.Snapshot, error) {
+func (r mutationResolver) Close(ctx context.Context, repoRef *string, prefix string) (*bug.Snapshot, error) {
repo, err := r.getRepo(repoRef)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
b, err := repo.ResolveBugPrefix(prefix)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
_, err = b.Close()
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
- snap := b.Snapshot()
-
- return *snap, nil
+ return b.Snapshot(), nil
}
-func (r mutationResolver) SetTitle(ctx context.Context, repoRef *string, prefix string, title string) (bug.Snapshot, error) {
+func (r mutationResolver) SetTitle(ctx context.Context, repoRef *string, prefix string, title string) (*bug.Snapshot, error) {
repo, err := r.getRepo(repoRef)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
b, err := repo.ResolveBugPrefix(prefix)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
_, err = b.SetTitle(title)
if err != nil {
- return bug.Snapshot{}, err
+ return nil, err
}
- snap := b.Snapshot()
-
- return *snap, nil
+ return b.Snapshot(), nil
}
diff --git a/graphql/resolvers/operations.go b/graphql/resolvers/operations.go
index c8089ac1..90817567 100644
--- a/graphql/resolvers/operations.go
+++ b/graphql/resolvers/operations.go
@@ -11,32 +11,37 @@ import (
type createOperationResolver struct{}
-func (createOperationResolver) Date(ctx context.Context, obj *bug.CreateOperation) (time.Time, error) {
- return obj.Time(), nil
+func (createOperationResolver) Date(ctx context.Context, obj *bug.CreateOperation) (*time.Time, error) {
+ t := obj.Time()
+ return &t, nil
}
type addCommentOperationResolver struct{}
-func (addCommentOperationResolver) Date(ctx context.Context, obj *bug.AddCommentOperation) (time.Time, error) {
- return obj.Time(), nil
+func (addCommentOperationResolver) Date(ctx context.Context, obj *bug.AddCommentOperation) (*time.Time, error) {
+ t := obj.Time()
+ return &t, nil
}
type editCommentOperationResolver struct{}
-func (editCommentOperationResolver) Date(ctx context.Context, obj *bug.EditCommentOperation) (time.Time, error) {
- return obj.Time(), nil
+func (editCommentOperationResolver) Date(ctx context.Context, obj *bug.EditCommentOperation) (*time.Time, error) {
+ t := obj.Time()
+ return &t, nil
}
type labelChangeOperation struct{}
-func (labelChangeOperation) Date(ctx context.Context, obj *bug.LabelChangeOperation) (time.Time, error) {
- return obj.Time(), nil
+func (labelChangeOperation) Date(ctx context.Context, obj *bug.LabelChangeOperation) (*time.Time, error) {
+ t := obj.Time()
+ return &t, nil
}
type setStatusOperationResolver struct{}
-func (setStatusOperationResolver) Date(ctx context.Context, obj *bug.SetStatusOperation) (time.Time, error) {
- return obj.Time(), nil
+func (setStatusOperationResolver) Date(ctx context.Context, obj *bug.SetStatusOperation) (*time.Time, error) {
+ t := obj.Time()
+ return &t, nil
}
func (setStatusOperationResolver) Status(ctx context.Context, obj *bug.SetStatusOperation) (models.Status, error) {
@@ -45,8 +50,9 @@ func (setStatusOperationResolver) Status(ctx context.Context, obj *bug.SetStatus
type setTitleOperationResolver struct{}
-func (setTitleOperationResolver) Date(ctx context.Context, obj *bug.SetTitleOperation) (time.Time, error) {
- return obj.Time(), nil
+func (setTitleOperationResolver) Date(ctx context.Context, obj *bug.SetTitleOperation) (*time.Time, error) {
+ t := obj.Time()
+ return &t, nil
}
func convertStatus(status bug.Status) (models.Status, error) {
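The repeated two-line body in these Date resolvers is forced by Go itself: &obj.Time() does not compile because the result of a function call is not addressable, so the value has to land in a local variable before its address can be returned. A short illustration with a hypothetical clock type standing in for the operation:

package main

import (
	"fmt"
	"time"
)

// clock is a hypothetical stand-in for an operation that exposes Time().
type clock struct{ unix int64 }

func (c clock) Time() time.Time { return time.Unix(c.unix, 0) }

// date mirrors the resolver bodies above: copy the call result into a local,
// then return its address. Writing `return &c.Time(), nil` would not compile,
// since a function call's result is not addressable.
func date(c clock) (*time.Time, error) {
	t := c.Time()
	return &t, nil
}

func main() {
	t, _ := date(clock{unix: 0})
	fmt.Println(t.UTC()) // 1970-01-01 00:00:00 +0000 UTC
}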
diff --git a/graphql/resolvers/repo.go b/graphql/resolvers/repo.go
index 9003fbf9..cf10e4ae 100644
--- a/graphql/resolvers/repo.go
+++ b/graphql/resolvers/repo.go
@@ -15,7 +15,7 @@ var _ graph.RepositoryResolver = &repoResolver{}
type repoResolver struct{}
-func (repoResolver) AllBugs(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int, queryStr *string) (models.BugConnection, error) {
+func (repoResolver) AllBugs(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int, queryStr *string) (*models.BugConnection, error) {
input := models.ConnectionInput{
Before: before,
After: after,
@@ -27,7 +27,7 @@ func (repoResolver) AllBugs(ctx context.Context, obj *models.Repository, after *
if queryStr != nil {
query2, err := cache.ParseQuery(*queryStr)
if err != nil {
- return models.BugConnection{}, err
+ return nil, err
}
query = query2
} else {
@@ -46,7 +46,7 @@ func (repoResolver) AllBugs(ctx context.Context, obj *models.Repository, after *
}
// The conMaker will finally load and compile bugs from git to replace the selected edges
- conMaker := func(lazyBugEdges []connections.LazyBugEdge, lazyNode []string, info models.PageInfo, totalCount int) (models.BugConnection, error) {
+ conMaker := func(lazyBugEdges []connections.LazyBugEdge, lazyNode []string, info models.PageInfo, totalCount int) (*models.BugConnection, error) {
edges := make([]models.BugEdge, len(lazyBugEdges))
nodes := make([]bug.Snapshot, len(lazyBugEdges))
@@ -54,7 +54,7 @@ func (repoResolver) AllBugs(ctx context.Context, obj *models.Repository, after *
b, err := obj.Repo.ResolveBug(lazyBugEdge.Id)
if err != nil {
- return models.BugConnection{}, err
+ return nil, err
}
snap := b.Snapshot()
@@ -66,7 +66,7 @@ func (repoResolver) AllBugs(ctx context.Context, obj *models.Repository, after *
nodes[i] = *snap
}
- return models.BugConnection{
+ return &models.BugConnection{
Edges: edges,
Nodes: nodes,
PageInfo: info,
@@ -87,7 +87,7 @@ func (repoResolver) Bug(ctx context.Context, obj *models.Repository, prefix stri
return b.Snapshot(), nil
}
-func (repoResolver) AllIdentities(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int) (models.IdentityConnection, error) {
+func (repoResolver) AllIdentities(ctx context.Context, obj *models.Repository, after *string, before *string, first *int, last *int) (*models.IdentityConnection, error) {
input := models.ConnectionInput{
Before: before,
After: after,
@@ -107,7 +107,7 @@ func (repoResolver) AllIdentities(ctx context.Context, obj *models.Repository, a
}
// The conMaker will finally load and compile identities from git to replace the selected edges
- conMaker := func(lazyIdentityEdges []connections.LazyIdentityEdge, lazyNode []string, info models.PageInfo, totalCount int) (models.IdentityConnection, error) {
+ conMaker := func(lazyIdentityEdges []connections.LazyIdentityEdge, lazyNode []string, info models.PageInfo, totalCount int) (*models.IdentityConnection, error) {
edges := make([]models.IdentityEdge, len(lazyIdentityEdges))
nodes := make([]identity.Interface, len(lazyIdentityEdges))
@@ -115,7 +115,7 @@ func (repoResolver) AllIdentities(ctx context.Context, obj *models.Repository, a
i, err := obj.Repo.ResolveIdentity(lazyIdentityEdge.Id)
if err != nil {
- return models.IdentityConnection{}, err
+ return nil, err
}
ii := identity.Interface(i.Identity)
@@ -127,7 +127,7 @@ func (repoResolver) AllIdentities(ctx context.Context, obj *models.Repository, a
nodes[k] = ii
}
- return models.IdentityConnection{
+ return &models.IdentityConnection{
Edges: edges,
Nodes: nodes,
PageInfo: info,
@@ -138,26 +138,22 @@ func (repoResolver) AllIdentities(ctx context.Context, obj *models.Repository, a
return connections.LazyIdentityCon(source, edger, conMaker, input)
}
-func (repoResolver) Identity(ctx context.Context, obj *models.Repository, prefix string) (*identity.Interface, error) {
+func (repoResolver) Identity(ctx context.Context, obj *models.Repository, prefix string) (identity.Interface, error) {
i, err := obj.Repo.ResolveIdentityPrefix(prefix)
if err != nil {
return nil, err
}
- ii := identity.Interface(i.Identity)
-
- return &ii, nil
+ return i.Identity, nil
}
-func (repoResolver) UserIdentity(ctx context.Context, obj *models.Repository) (*identity.Interface, error) {
+func (repoResolver) UserIdentity(ctx context.Context, obj *models.Repository) (identity.Interface, error) {
i, err := obj.Repo.GetUserIdentity()
if err != nil {
return nil, err
}
- ii := identity.Interface(i.Identity)
-
- return &ii, nil
+ return i.Identity, nil
}
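
The two hunks above drop the pointer-to-interface return type (*identity.Interface) in favour of returning the interface value itself, so callers no longer have to dereference before calling methods. A minimal stand-alone sketch of the difference, using an illustrative Named interface rather than git-bug's own types:

    package main

    import (
        "errors"
        "fmt"
    )

    // Named stands in for an interface such as identity.Interface.
    type Named interface{ Name() string }

    type user struct{ name string }

    func (u user) Name() string { return u.name }

    // New style: return the interface value directly; nil means "not found".
    func findDirect(ok bool) (Named, error) {
        if !ok {
            return nil, errors.New("not found")
        }
        return user{name: "alice"}, nil
    }

    // Old style: a pointer to an interface adds an extra indirection that
    // every caller has to unwrap.
    func findIndirect(ok bool) (*Named, error) {
        if !ok {
            return nil, errors.New("not found")
        }
        var n Named = user{name: "alice"}
        return &n, nil
    }

    func main() {
        n, _ := findDirect(true)
        fmt.Println(n.Name())

        p, _ := findIndirect(true)
        fmt.Println((*p).Name()) // dereference first, then call
    }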
diff --git a/graphql/resolvers/timeline.go b/graphql/resolvers/timeline.go
index 42e0a643..27f799ba 100644
--- a/graphql/resolvers/timeline.go
+++ b/graphql/resolvers/timeline.go
@@ -10,40 +10,47 @@ import (
type commentHistoryStepResolver struct{}
-func (commentHistoryStepResolver) Date(ctx context.Context, obj *bug.CommentHistoryStep) (time.Time, error) {
- return obj.UnixTime.Time(), nil
+func (commentHistoryStepResolver) Date(ctx context.Context, obj *bug.CommentHistoryStep) (*time.Time, error) {
+ t := obj.UnixTime.Time()
+ return &t, nil
}
type addCommentTimelineItemResolver struct{}
-func (addCommentTimelineItemResolver) CreatedAt(ctx context.Context, obj *bug.AddCommentTimelineItem) (time.Time, error) {
- return obj.CreatedAt.Time(), nil
+func (addCommentTimelineItemResolver) CreatedAt(ctx context.Context, obj *bug.AddCommentTimelineItem) (*time.Time, error) {
+ t := obj.CreatedAt.Time()
+ return &t, nil
}
-func (addCommentTimelineItemResolver) LastEdit(ctx context.Context, obj *bug.AddCommentTimelineItem) (time.Time, error) {
- return obj.LastEdit.Time(), nil
+func (addCommentTimelineItemResolver) LastEdit(ctx context.Context, obj *bug.AddCommentTimelineItem) (*time.Time, error) {
+ t := obj.LastEdit.Time()
+ return &t, nil
}
type createTimelineItemResolver struct{}
-func (createTimelineItemResolver) CreatedAt(ctx context.Context, obj *bug.CreateTimelineItem) (time.Time, error) {
- return obj.CreatedAt.Time(), nil
+func (createTimelineItemResolver) CreatedAt(ctx context.Context, obj *bug.CreateTimelineItem) (*time.Time, error) {
+ t := obj.CreatedAt.Time()
+ return &t, nil
}
-func (createTimelineItemResolver) LastEdit(ctx context.Context, obj *bug.CreateTimelineItem) (time.Time, error) {
- return obj.LastEdit.Time(), nil
+func (createTimelineItemResolver) LastEdit(ctx context.Context, obj *bug.CreateTimelineItem) (*time.Time, error) {
+ t := obj.LastEdit.Time()
+ return &t, nil
}
type labelChangeTimelineItem struct{}
-func (labelChangeTimelineItem) Date(ctx context.Context, obj *bug.LabelChangeTimelineItem) (time.Time, error) {
- return obj.UnixTime.Time(), nil
+func (labelChangeTimelineItem) Date(ctx context.Context, obj *bug.LabelChangeTimelineItem) (*time.Time, error) {
+ t := obj.UnixTime.Time()
+ return &t, nil
}
type setStatusTimelineItem struct{}
-func (setStatusTimelineItem) Date(ctx context.Context, obj *bug.SetStatusTimelineItem) (time.Time, error) {
- return obj.UnixTime.Time(), nil
+func (setStatusTimelineItem) Date(ctx context.Context, obj *bug.SetStatusTimelineItem) (*time.Time, error) {
+ t := obj.UnixTime.Time()
+ return &t, nil
}
func (setStatusTimelineItem) Status(ctx context.Context, obj *bug.SetStatusTimelineItem) (models.Status, error) {
@@ -52,6 +59,7 @@ func (setStatusTimelineItem) Status(ctx context.Context, obj *bug.SetStatusTimel
type setTitleTimelineItem struct{}
-func (setTitleTimelineItem) Date(ctx context.Context, obj *bug.SetTitleTimelineItem) (time.Time, error) {
- return obj.UnixTime.Time(), nil
+func (setTitleTimelineItem) Date(ctx context.Context, obj *bug.SetTitleTimelineItem) (*time.Time, error) {
+ t := obj.UnixTime.Time()
+ return &t, nil
}
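
The timeline resolvers now return *time.Time. The timestamp accessor yields a value, and Go does not allow taking the address of a call result directly, so each resolver copies the value into a local before returning its address. The same pattern in isolation (now() is an illustrative stand-in for UnixTime.Time()):

    package main

    import (
        "fmt"
        "time"
    )

    // now stands in for a value-returning accessor such as UnixTime.Time().
    func now() time.Time { return time.Unix(1000000, 0).UTC() }

    func datePtr() (*time.Time, error) {
        // `return &now(), nil` would not compile: a call result is not
        // addressable, so it is copied into a local first.
        t := now()
        return &t, nil
    }

    func main() {
        t, _ := datePtr()
        fmt.Println(t.Format(time.RFC3339))
    }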
diff --git a/vendor/github.com/99designs/gqlgen/api/generate.go b/vendor/github.com/99designs/gqlgen/api/generate.go
new file mode 100644
index 00000000..3dd083f5
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/api/generate.go
@@ -0,0 +1,76 @@
+package api
+
+import (
+ "syscall"
+
+ "github.com/99designs/gqlgen/codegen"
+ "github.com/99designs/gqlgen/codegen/config"
+ "github.com/99designs/gqlgen/plugin"
+ "github.com/99designs/gqlgen/plugin/modelgen"
+ "github.com/99designs/gqlgen/plugin/resolvergen"
+ "github.com/pkg/errors"
+ "golang.org/x/tools/go/packages"
+)
+
+func Generate(cfg *config.Config, option ...Option) error {
+ _ = syscall.Unlink(cfg.Exec.Filename)
+ _ = syscall.Unlink(cfg.Model.Filename)
+
+ plugins := []plugin.Plugin{
+ modelgen.New(),
+ resolvergen.New(),
+ }
+
+ for _, o := range option {
+ o(cfg, &plugins)
+ }
+
+ for _, p := range plugins {
+ if mut, ok := p.(plugin.ConfigMutator); ok {
+ err := mut.MutateConfig(cfg)
+ if err != nil {
+ return errors.Wrap(err, p.Name())
+ }
+ }
+ }
+ // Merge again now that the generated models have been injected into the typemap
+ data, err := codegen.BuildData(cfg)
+ if err != nil {
+ return errors.Wrap(err, "merging failed")
+ }
+
+ if err = codegen.GenerateCode(data); err != nil {
+ return errors.Wrap(err, "generating core failed")
+ }
+
+ for _, p := range plugins {
+ if mut, ok := p.(plugin.CodeGenerator); ok {
+ err := mut.GenerateCode(data)
+ if err != nil {
+ return errors.Wrap(err, p.Name())
+ }
+ }
+ }
+
+ if err := validate(cfg); err != nil {
+ return errors.Wrap(err, "validation failed")
+ }
+
+ return nil
+}
+
+func validate(cfg *config.Config) error {
+ roots := []string{cfg.Exec.ImportPath()}
+ if cfg.Model.IsDefined() {
+ roots = append(roots, cfg.Model.ImportPath())
+ }
+
+ if cfg.Resolver.IsDefined() {
+ roots = append(roots, cfg.Resolver.ImportPath())
+ }
+ _, err := packages.Load(&packages.Config{Mode: packages.LoadTypes | packages.LoadSyntax}, roots...)
+ if err != nil {
+ return errors.Wrap(err, "validation failed")
+ }
+ return nil
+}
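
The new api package is the programmatic entry point that the cmd package below delegates to: it unlinks stale output, runs config-mutating plugins, builds the unified data model, generates the core code, runs code-generating plugins, and finally re-loads the generated packages to validate them. A minimal sketch of driving it from a small generate program, assuming a gqlgen.yml is reachable from the working directory:

    package main

    import (
        "fmt"
        "os"

        "github.com/99designs/gqlgen/api"
        "github.com/99designs/gqlgen/codegen/config"
    )

    func main() {
        // Locate gqlgen.yml in the current directory or a parent.
        cfg, err := config.LoadConfigFromDefaultLocations()
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }

        // Run the default plugin set (modelgen, resolvergen) plus the core generator.
        if err := api.Generate(cfg); err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
    }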
diff --git a/vendor/github.com/99designs/gqlgen/api/option.go b/vendor/github.com/99designs/gqlgen/api/option.go
new file mode 100644
index 00000000..f7ba6774
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/api/option.go
@@ -0,0 +1,20 @@
+package api
+
+import (
+ "github.com/99designs/gqlgen/codegen/config"
+ "github.com/99designs/gqlgen/plugin"
+)
+
+type Option func(cfg *config.Config, plugins *[]plugin.Plugin)
+
+func NoPlugins() Option {
+ return func(cfg *config.Config, plugins *[]plugin.Plugin) {
+ *plugins = nil
+ }
+}
+
+func AddPlugin(p plugin.Plugin) Option {
+ return func(cfg *config.Config, plugins *[]plugin.Plugin) {
+ *plugins = append(*plugins, p)
+ }
+}
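
An Option is just a function over the config and the plugin slice, so alongside the built-in NoPlugins and AddPlugin a caller can inject custom behaviour. A hedged sketch of a custom option that keeps only the model generator (onlyModelgen is hypothetical, not part of gqlgen):

    package main

    import (
        "log"

        "github.com/99designs/gqlgen/api"
        "github.com/99designs/gqlgen/codegen/config"
        "github.com/99designs/gqlgen/plugin"
        "github.com/99designs/gqlgen/plugin/modelgen"
    )

    // onlyModelgen replaces whatever plugins are queued with modelgen alone.
    func onlyModelgen() api.Option {
        return func(cfg *config.Config, plugins *[]plugin.Plugin) {
            *plugins = []plugin.Plugin{modelgen.New()}
        }
    }

    func main() {
        cfg, err := config.LoadConfigFromDefaultLocations()
        if err != nil {
            log.Fatal(err)
        }
        if err := api.Generate(cfg, onlyModelgen()); err != nil {
            log.Fatal(err)
        }
    }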
diff --git a/vendor/github.com/99designs/gqlgen/codegen/ambient.go b/vendor/github.com/99designs/gqlgen/cmd/ambient.go
index c9909fcc..7838fdf1 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/ambient.go
+++ b/vendor/github.com/99designs/gqlgen/cmd/ambient.go
@@ -1,4 +1,4 @@
-package codegen
+package cmd
import (
// Import and ignore the ambient imports listed below so dependency managers
diff --git a/vendor/github.com/99designs/gqlgen/cmd/gen.go b/vendor/github.com/99designs/gqlgen/cmd/gen.go
index 3842f02b..c69858b4 100644
--- a/vendor/github.com/99designs/gqlgen/cmd/gen.go
+++ b/vendor/github.com/99designs/gqlgen/cmd/gen.go
@@ -2,10 +2,10 @@ package cmd
import (
"fmt"
- "io/ioutil"
"os"
- "github.com/99designs/gqlgen/codegen"
+ "github.com/99designs/gqlgen/api"
+ "github.com/99designs/gqlgen/codegen/config"
"github.com/pkg/errors"
"github.com/urfave/cli"
)
@@ -18,43 +18,27 @@ var genCmd = cli.Command{
cli.StringFlag{Name: "config, c", Usage: "the config filename"},
},
Action: func(ctx *cli.Context) {
- var config *codegen.Config
+ var cfg *config.Config
var err error
if configFilename := ctx.String("config"); configFilename != "" {
- config, err = codegen.LoadConfig(configFilename)
+ cfg, err = config.LoadConfig(configFilename)
if err != nil {
fmt.Fprintln(os.Stderr, err.Error())
os.Exit(1)
}
} else {
- config, err = codegen.LoadConfigFromDefaultLocations()
+ cfg, err = config.LoadConfigFromDefaultLocations()
if os.IsNotExist(errors.Cause(err)) {
- config = codegen.DefaultConfig()
+ cfg = config.DefaultConfig()
} else if err != nil {
fmt.Fprintln(os.Stderr, err.Error())
- os.Exit(1)
- }
- }
-
- for _, filename := range config.SchemaFilename {
- var schemaRaw []byte
- schemaRaw, err = ioutil.ReadFile(filename)
- if err != nil {
- fmt.Fprintln(os.Stderr, "unable to open schema: "+err.Error())
- os.Exit(1)
+ os.Exit(2)
}
- config.SchemaStr[filename] = string(schemaRaw)
- }
-
- if err = config.Check(); err != nil {
- fmt.Fprintln(os.Stderr, "invalid config format: "+err.Error())
- os.Exit(1)
}
- err = codegen.Generate(*config)
- if err != nil {
+ if err = api.Generate(cfg); err != nil {
fmt.Fprintln(os.Stderr, err.Error())
- os.Exit(2)
+ os.Exit(3)
}
},
}
diff --git a/vendor/github.com/99designs/gqlgen/cmd/init.go b/vendor/github.com/99designs/gqlgen/cmd/init.go
index 1e7c18b9..e07bed97 100644
--- a/vendor/github.com/99designs/gqlgen/cmd/init.go
+++ b/vendor/github.com/99designs/gqlgen/cmd/init.go
@@ -7,10 +7,13 @@ import (
"os"
"strings"
- "github.com/99designs/gqlgen/codegen"
+ "github.com/99designs/gqlgen/api"
+ "github.com/99designs/gqlgen/plugin/servergen"
+
+ "github.com/99designs/gqlgen/codegen/config"
"github.com/pkg/errors"
"github.com/urfave/cli"
- "gopkg.in/yaml.v2"
+ yaml "gopkg.in/yaml.v2"
)
var configComment = `
@@ -68,46 +71,27 @@ var initCmd = cli.Command{
},
}
-func GenerateGraphServer(config *codegen.Config, serverFilename string) {
- for _, filename := range config.SchemaFilename {
- schemaRaw, err := ioutil.ReadFile(filename)
- if err != nil {
- fmt.Fprintln(os.Stderr, "unable to open schema: "+err.Error())
- os.Exit(1)
- }
- config.SchemaStr[filename] = string(schemaRaw)
- }
-
- if err := config.Check(); err != nil {
- fmt.Fprintln(os.Stderr, "invalid config format: "+err.Error())
- os.Exit(1)
- }
-
- if err := codegen.Generate(*config); err != nil {
- fmt.Fprintln(os.Stderr, err.Error())
- os.Exit(1)
- }
-
- if err := codegen.GenerateServer(*config, serverFilename); err != nil {
+func GenerateGraphServer(cfg *config.Config, serverFilename string) {
+ err := api.Generate(cfg, api.AddPlugin(servergen.New(serverFilename)))
+ if err != nil {
fmt.Fprintln(os.Stderr, err.Error())
- os.Exit(1)
}
fmt.Fprintf(os.Stdout, "Exec \"go run ./%s\" to start GraphQL server\n", serverFilename)
}
-func initConfig(ctx *cli.Context) *codegen.Config {
- var config *codegen.Config
+func initConfig(ctx *cli.Context) *config.Config {
+ var cfg *config.Config
var err error
configFilename := ctx.String("config")
if configFilename != "" {
- config, err = codegen.LoadConfig(configFilename)
+ cfg, err = config.LoadConfig(configFilename)
} else {
- config, err = codegen.LoadConfigFromDefaultLocations()
+ cfg, err = config.LoadConfigFromDefaultLocations()
}
- if config != nil {
- fmt.Fprintf(os.Stderr, "init failed: a configuration file already exists at %s\n", config.FilePath)
+ if cfg != nil {
+ fmt.Fprintf(os.Stderr, "init failed: a configuration file already exists\n")
os.Exit(1)
}
@@ -119,9 +103,9 @@ func initConfig(ctx *cli.Context) *codegen.Config {
if configFilename == "" {
configFilename = "gqlgen.yml"
}
- config = codegen.DefaultConfig()
+ cfg = config.DefaultConfig()
- config.Resolver = codegen.PackageConfig{
+ cfg.Resolver = config.PackageConfig{
Filename: "resolver.go",
Type: "Resolver",
}
@@ -129,23 +113,21 @@ func initConfig(ctx *cli.Context) *codegen.Config {
var buf bytes.Buffer
buf.WriteString(strings.TrimSpace(configComment))
buf.WriteString("\n\n")
- {
- var b []byte
- b, err = yaml.Marshal(config)
- if err != nil {
- fmt.Fprintln(os.Stderr, "unable to marshal yaml: "+err.Error())
- os.Exit(1)
- }
- buf.Write(b)
+ var b []byte
+ b, err = yaml.Marshal(cfg)
+ if err != nil {
+ fmt.Fprintln(os.Stderr, "unable to marshal yaml: "+err.Error())
+ os.Exit(1)
}
+ buf.Write(b)
err = ioutil.WriteFile(configFilename, buf.Bytes(), 0644)
if err != nil {
- fmt.Fprintln(os.Stderr, "unable to write config file: "+err.Error())
+ fmt.Fprintln(os.Stderr, "unable to write cfg file: "+err.Error())
os.Exit(1)
}
- return config
+ return cfg
}
func initSchema(schemaFilename string) {
diff --git a/vendor/github.com/99designs/gqlgen/cmd/root.go b/vendor/github.com/99designs/gqlgen/cmd/root.go
index 519c2e1a..dc2970ac 100644
--- a/vendor/github.com/99designs/gqlgen/cmd/root.go
+++ b/vendor/github.com/99designs/gqlgen/cmd/root.go
@@ -7,7 +7,6 @@ import (
"os"
"github.com/99designs/gqlgen/graphql"
- "github.com/99designs/gqlgen/internal/gopath"
"github.com/urfave/cli"
// Required since otherwise dep will prune away these unused packages before codegen has a chance to run
@@ -23,14 +22,6 @@ func Execute() {
app.Flags = genCmd.Flags
app.Version = graphql.Version
app.Before = func(context *cli.Context) error {
- pwd, err := os.Getwd()
- if err != nil {
- return fmt.Errorf("unable to determine current workding dir: %s\n", err.Error())
- }
-
- if !gopath.Contains(pwd) {
- return fmt.Errorf("gqlgen must be run from inside your $GOPATH\n")
- }
if context.Bool("verbose") {
log.SetFlags(0)
} else {
@@ -47,7 +38,7 @@ func Execute() {
}
if err := app.Run(os.Args); err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
+ fmt.Fprint(os.Stderr, err.Error())
os.Exit(1)
}
}
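
The switch from Fprintf to Fprint when printing the final error avoids treating the error text as a format string, where a stray % would otherwise be mangled. A quick illustration:

    package main

    import (
        "fmt"
        "os"
    )

    func main() {
        msg := "unexpected token: 100%"

        // As a format string, the trailing % has no verb and is reported as such.
        fmt.Fprintf(os.Stderr, msg) // unexpected token: 100%!(NOVERB)
        fmt.Fprintln(os.Stderr)

        // Fprint writes the text verbatim.
        fmt.Fprint(os.Stderr, msg) // unexpected token: 100%
        fmt.Fprintln(os.Stderr)
    }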
diff --git a/vendor/github.com/99designs/gqlgen/codegen/args.go b/vendor/github.com/99designs/gqlgen/codegen/args.go
new file mode 100644
index 00000000..d1498bdd
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/args.go
@@ -0,0 +1,104 @@
+package codegen
+
+import (
+ "fmt"
+ "go/types"
+ "strings"
+
+ "github.com/99designs/gqlgen/codegen/config"
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/pkg/errors"
+ "github.com/vektah/gqlparser/ast"
+)
+
+type ArgSet struct {
+ Args []*FieldArgument
+ FuncDecl string
+}
+
+type FieldArgument struct {
+ *ast.ArgumentDefinition
+ TypeReference *config.TypeReference
+ VarName string // The name of the var in go
+ Object *Object // A link back to the parent object
+ Default interface{} // The default value
+ Directives []*Directive
+ Value interface{} // value set in Data
+}
+
+func (f *FieldArgument) Stream() bool {
+ return f.Object != nil && f.Object.Stream
+}
+
+func (b *builder) buildArg(obj *Object, arg *ast.ArgumentDefinition) (*FieldArgument, error) {
+ tr, err := b.Binder.TypeReference(arg.Type, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ argDirs, err := b.getDirectives(arg.Directives)
+ if err != nil {
+ return nil, err
+ }
+ newArg := FieldArgument{
+ ArgumentDefinition: arg,
+ TypeReference: tr,
+ Object: obj,
+ VarName: templates.ToGoPrivate(arg.Name),
+ Directives: argDirs,
+ }
+
+ if arg.DefaultValue != nil {
+ newArg.Default, err = arg.DefaultValue.Value(nil)
+ if err != nil {
+ return nil, errors.Errorf("default value is not valid: %s", err.Error())
+ }
+ }
+
+ return &newArg, nil
+}
+
+func (b *builder) bindArgs(field *Field, params *types.Tuple) error {
+ var newArgs []*FieldArgument
+
+nextArg:
+ for j := 0; j < params.Len(); j++ {
+ param := params.At(j)
+ for _, oldArg := range field.Args {
+ if strings.EqualFold(oldArg.Name, param.Name()) {
+ tr, err := b.Binder.TypeReference(oldArg.Type, param.Type())
+ if err != nil {
+ return err
+ }
+ oldArg.TypeReference = tr
+
+ newArgs = append(newArgs, oldArg)
+ continue nextArg
+ }
+ }
+
+ // no matching arg found, abort
+ return fmt.Errorf("arg %s not in schema", param.Name())
+ }
+
+ field.Args = newArgs
+ return nil
+}
+
+func (a *Data) Args() map[string][]*FieldArgument {
+ ret := map[string][]*FieldArgument{}
+ for _, o := range a.Objects {
+ for _, f := range o.Fields {
+ if len(f.Args) > 0 {
+ ret[f.ArgsFunc()] = f.Args
+ }
+ }
+ }
+
+ for _, d := range a.Directives {
+ if len(d.Args) > 0 {
+ ret[d.ArgsFunc()] = d.Args
+ }
+ }
+ return ret
+}
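
bindArgs pairs each Go resolver parameter with a schema argument by case-insensitive name and fails on any parameter that has no match. The matching rule in isolation (the argument and parameter names are made up):

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        schemaArgs := []string{"userId", "first"}
        goParams := []string{"userID", "first", "unknown"}

        for _, p := range goParams {
            matched := false
            for _, a := range schemaArgs {
                if strings.EqualFold(a, p) { // same rule as bindArgs above
                    fmt.Printf("param %q binds to arg %q\n", p, a)
                    matched = true
                    break
                }
            }
            if !matched {
                fmt.Printf("arg %s not in schema\n", p) // bindArgs returns this as an error
            }
        }
    }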
diff --git a/vendor/github.com/99designs/gqlgen/codegen/args.gotpl b/vendor/github.com/99designs/gqlgen/codegen/args.gotpl
new file mode 100644
index 00000000..4c721218
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/args.gotpl
@@ -0,0 +1,43 @@
+{{ range $name, $args := .Args }}
+func (ec *executionContext) {{ $name }}(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
+ var err error
+ args := map[string]interface{}{}
+ {{- range $i, $arg := . }}
+ var arg{{$i}} {{ $arg.TypeReference.GO | ref}}
+ if tmp, ok := rawArgs[{{$arg.Name|quote}}]; ok {
+ {{- if $arg.Directives }}
+ getArg0 := func(ctx context.Context) (interface{}, error) { return ec.{{ $arg.TypeReference.UnmarshalFunc }}(ctx, tmp) }
+
+ {{- range $i, $directive := $arg.Directives }}
+ getArg{{add $i 1}} := func(ctx context.Context) (res interface{}, err error) {
+ {{- range $dArg := $directive.Args }}
+ {{- if and $dArg.TypeReference.IsPtr ( notNil "Value" $dArg ) }}
+ {{ $dArg.VarName }} := {{ $dArg.Value | dump }}
+ {{- end }}
+ {{- end }}
+ n := getArg{{$i}}
+ return ec.directives.{{$directive.Name|ucFirst}}({{$directive.ResolveArgs "tmp" "n" }})
+ }
+ {{- end }}
+
+ tmp, err = getArg{{$arg.Directives|len}}(ctx)
+ if err != nil {
+ return nil, err
+ }
+ if data, ok := tmp.({{ $arg.TypeReference.GO | ref }}) ; ok {
+ arg{{$i}} = data
+ } else {
+ return nil, fmt.Errorf(`unexpected type %T from directive, should be {{ $arg.TypeReference.GO }}`, tmp)
+ }
+ {{- else }}
+ arg{{$i}}, err = ec.{{ $arg.TypeReference.UnmarshalFunc }}(ctx, tmp)
+ if err != nil {
+ return nil, err
+ }
+ {{- end }}
+ }
+ args[{{$arg.Name|quote}}] = arg{{$i}}
+ {{- end }}
+ return args, nil
+}
+{{ end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/build.go b/vendor/github.com/99designs/gqlgen/codegen/build.go
deleted file mode 100644
index 582689a7..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/build.go
+++ /dev/null
@@ -1,194 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/build"
- "go/types"
- "os"
-
- "github.com/pkg/errors"
- "golang.org/x/tools/go/loader"
-)
-
-type Build struct {
- PackageName string
- Objects Objects
- Inputs Objects
- Interfaces []*Interface
- QueryRoot *Object
- MutationRoot *Object
- SubscriptionRoot *Object
- SchemaRaw map[string]string
- SchemaFilename SchemaFilenames
- Directives []*Directive
-}
-
-type ModelBuild struct {
- PackageName string
- Models []Model
- Enums []Enum
-}
-
-type ResolverBuild struct {
- PackageName string
- ResolverType string
- Objects Objects
- ResolverFound bool
-}
-
-type ServerBuild struct {
- PackageName string
- ExecPackageName string
- ResolverPackageName string
-}
-
-// Create a list of models that need to be generated
-func (cfg *Config) models() (*ModelBuild, error) {
- namedTypes := cfg.buildNamedTypes()
-
- progLoader := cfg.newLoaderWithoutErrors()
-
- prog, err := progLoader.Load()
- if err != nil {
- return nil, errors.Wrap(err, "loading failed")
- }
-
- cfg.bindTypes(namedTypes, cfg.Model.Dir(), prog)
-
- models, err := cfg.buildModels(namedTypes, prog)
- if err != nil {
- return nil, err
- }
- return &ModelBuild{
- PackageName: cfg.Model.Package,
- Models: models,
- Enums: cfg.buildEnums(namedTypes),
- }, nil
-}
-
-// bind a schema together with some code to generate a Build
-func (cfg *Config) resolver() (*ResolverBuild, error) {
- progLoader := cfg.newLoaderWithoutErrors()
- progLoader.Import(cfg.Resolver.ImportPath())
-
- prog, err := progLoader.Load()
- if err != nil {
- return nil, err
- }
-
- destDir := cfg.Resolver.Dir()
-
- namedTypes := cfg.buildNamedTypes()
-
- cfg.bindTypes(namedTypes, destDir, prog)
-
- objects, err := cfg.buildObjects(namedTypes, prog)
- if err != nil {
- return nil, err
- }
-
- def, _ := findGoType(prog, cfg.Resolver.ImportPath(), cfg.Resolver.Type)
- resolverFound := def != nil
-
- return &ResolverBuild{
- PackageName: cfg.Resolver.Package,
- Objects: objects,
- ResolverType: cfg.Resolver.Type,
- ResolverFound: resolverFound,
- }, nil
-}
-
-func (cfg *Config) server(destDir string) *ServerBuild {
- return &ServerBuild{
- PackageName: cfg.Resolver.Package,
- ExecPackageName: cfg.Exec.ImportPath(),
- ResolverPackageName: cfg.Resolver.ImportPath(),
- }
-}
-
-// bind a schema together with some code to generate a Build
-func (cfg *Config) bind() (*Build, error) {
- namedTypes := cfg.buildNamedTypes()
-
- progLoader := cfg.newLoaderWithoutErrors()
- prog, err := progLoader.Load()
- if err != nil {
- return nil, errors.Wrap(err, "loading failed")
- }
-
- cfg.bindTypes(namedTypes, cfg.Exec.Dir(), prog)
-
- objects, err := cfg.buildObjects(namedTypes, prog)
- if err != nil {
- return nil, err
- }
-
- inputs, err := cfg.buildInputs(namedTypes, prog)
- if err != nil {
- return nil, err
- }
- directives, err := cfg.buildDirectives(namedTypes)
- if err != nil {
- return nil, err
- }
-
- b := &Build{
- PackageName: cfg.Exec.Package,
- Objects: objects,
- Interfaces: cfg.buildInterfaces(namedTypes, prog),
- Inputs: inputs,
- SchemaRaw: cfg.SchemaStr,
- SchemaFilename: cfg.SchemaFilename,
- Directives: directives,
- }
-
- if cfg.schema.Query != nil {
- b.QueryRoot = b.Objects.ByName(cfg.schema.Query.Name)
- } else {
- return b, fmt.Errorf("query entry point missing")
- }
-
- if cfg.schema.Mutation != nil {
- b.MutationRoot = b.Objects.ByName(cfg.schema.Mutation.Name)
- }
-
- if cfg.schema.Subscription != nil {
- b.SubscriptionRoot = b.Objects.ByName(cfg.schema.Subscription.Name)
- }
- return b, nil
-}
-
-func (cfg *Config) validate() error {
- progLoader := cfg.newLoaderWithErrors()
- _, err := progLoader.Load()
- return err
-}
-
-func (cfg *Config) newLoaderWithErrors() loader.Config {
- conf := loader.Config{}
-
- for _, pkg := range cfg.Models.referencedPackages() {
- conf.Import(pkg)
- }
- return conf
-}
-
-func (cfg *Config) newLoaderWithoutErrors() loader.Config {
- conf := cfg.newLoaderWithErrors()
- conf.AllowErrors = true
- conf.TypeChecker = types.Config{
- Error: func(e error) {},
- }
- return conf
-}
-
-func resolvePkg(pkgName string) (string, error) {
- cwd, _ := os.Getwd()
-
- pkg, err := build.Default.Import(pkgName, cwd, build.FindOnly)
- if err != nil {
- return "", err
- }
-
- return pkg.ImportPath, nil
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/codegen.go b/vendor/github.com/99designs/gqlgen/codegen/codegen.go
deleted file mode 100644
index 773e3db7..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/codegen.go
+++ /dev/null
@@ -1,179 +0,0 @@
-package codegen
-
-import (
- "log"
- "os"
- "path/filepath"
- "regexp"
- "syscall"
-
- "github.com/99designs/gqlgen/codegen/templates"
- "github.com/pkg/errors"
- "github.com/vektah/gqlparser"
- "github.com/vektah/gqlparser/ast"
- "github.com/vektah/gqlparser/gqlerror"
-)
-
-func Generate(cfg Config) error {
- if err := cfg.normalize(); err != nil {
- return err
- }
-
- _ = syscall.Unlink(cfg.Exec.Filename)
- _ = syscall.Unlink(cfg.Model.Filename)
-
- modelsBuild, err := cfg.models()
- if err != nil {
- return errors.Wrap(err, "model plan failed")
- }
- if len(modelsBuild.Models) > 0 || len(modelsBuild.Enums) > 0 {
- if err = templates.RenderToFile("models.gotpl", cfg.Model.Filename, modelsBuild); err != nil {
- return err
- }
-
- for _, model := range modelsBuild.Models {
- modelCfg := cfg.Models[model.GQLType]
- modelCfg.Model = cfg.Model.ImportPath() + "." + model.GoType
- cfg.Models[model.GQLType] = modelCfg
- }
-
- for _, enum := range modelsBuild.Enums {
- modelCfg := cfg.Models[enum.GQLType]
- modelCfg.Model = cfg.Model.ImportPath() + "." + enum.GoType
- cfg.Models[enum.GQLType] = modelCfg
- }
- }
-
- build, err := cfg.bind()
- if err != nil {
- return errors.Wrap(err, "exec plan failed")
- }
-
- if err := templates.RenderToFile("generated.gotpl", cfg.Exec.Filename, build); err != nil {
- return err
- }
-
- if cfg.Resolver.IsDefined() {
- if err := generateResolver(cfg); err != nil {
- return errors.Wrap(err, "generating resolver failed")
- }
- }
-
- if err := cfg.validate(); err != nil {
- return errors.Wrap(err, "validation failed")
- }
-
- return nil
-}
-
-func GenerateServer(cfg Config, filename string) error {
- if err := cfg.Exec.normalize(); err != nil {
- return errors.Wrap(err, "exec")
- }
- if err := cfg.Resolver.normalize(); err != nil {
- return errors.Wrap(err, "resolver")
- }
-
- serverFilename := abs(filename)
- serverBuild := cfg.server(filepath.Dir(serverFilename))
-
- if _, err := os.Stat(serverFilename); os.IsNotExist(errors.Cause(err)) {
- err = templates.RenderToFile("server.gotpl", serverFilename, serverBuild)
- if err != nil {
- return errors.Wrap(err, "generate server failed")
- }
- } else {
- log.Printf("Skipped server: %s already exists\n", serverFilename)
- }
- return nil
-}
-
-func generateResolver(cfg Config) error {
- resolverBuild, err := cfg.resolver()
- if err != nil {
- return errors.Wrap(err, "resolver build failed")
- }
- filename := cfg.Resolver.Filename
-
- if resolverBuild.ResolverFound {
- log.Printf("Skipped resolver: %s.%s already exists\n", cfg.Resolver.ImportPath(), cfg.Resolver.Type)
- return nil
- }
-
- if _, err := os.Stat(filename); os.IsNotExist(errors.Cause(err)) {
- if err := templates.RenderToFile("resolver.gotpl", filename, resolverBuild); err != nil {
- return err
- }
- } else {
- log.Printf("Skipped resolver: %s already exists\n", filename)
- }
-
- return nil
-}
-
-func (cfg *Config) normalize() error {
- if err := cfg.Model.normalize(); err != nil {
- return errors.Wrap(err, "model")
- }
-
- if err := cfg.Exec.normalize(); err != nil {
- return errors.Wrap(err, "exec")
- }
-
- if cfg.Resolver.IsDefined() {
- if err := cfg.Resolver.normalize(); err != nil {
- return errors.Wrap(err, "resolver")
- }
- }
-
- builtins := TypeMap{
- "__Directive": {Model: "github.com/99designs/gqlgen/graphql/introspection.Directive"},
- "__Type": {Model: "github.com/99designs/gqlgen/graphql/introspection.Type"},
- "__Field": {Model: "github.com/99designs/gqlgen/graphql/introspection.Field"},
- "__EnumValue": {Model: "github.com/99designs/gqlgen/graphql/introspection.EnumValue"},
- "__InputValue": {Model: "github.com/99designs/gqlgen/graphql/introspection.InputValue"},
- "__Schema": {Model: "github.com/99designs/gqlgen/graphql/introspection.Schema"},
- "Int": {Model: "github.com/99designs/gqlgen/graphql.Int"},
- "Float": {Model: "github.com/99designs/gqlgen/graphql.Float"},
- "String": {Model: "github.com/99designs/gqlgen/graphql.String"},
- "Boolean": {Model: "github.com/99designs/gqlgen/graphql.Boolean"},
- "ID": {Model: "github.com/99designs/gqlgen/graphql.ID"},
- "Time": {Model: "github.com/99designs/gqlgen/graphql.Time"},
- "Map": {Model: "github.com/99designs/gqlgen/graphql.Map"},
- }
-
- if cfg.Models == nil {
- cfg.Models = TypeMap{}
- }
- for typeName, entry := range builtins {
- if !cfg.Models.Exists(typeName) {
- cfg.Models[typeName] = entry
- }
- }
-
- var sources []*ast.Source
- for _, filename := range cfg.SchemaFilename {
- sources = append(sources, &ast.Source{Name: filename, Input: cfg.SchemaStr[filename]})
- }
-
- var err *gqlerror.Error
- cfg.schema, err = gqlparser.LoadSchema(sources...)
- if err != nil {
- return err
- }
- return nil
-}
-
-var invalidPackageNameChar = regexp.MustCompile(`[^\w]`)
-
-func sanitizePackageName(pkg string) string {
- return invalidPackageNameChar.ReplaceAllLiteralString(filepath.Base(pkg), "_")
-}
-
-func abs(path string) string {
- absPath, err := filepath.Abs(path)
- if err != nil {
- panic(err)
- }
- return filepath.ToSlash(absPath)
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/complexity.go b/vendor/github.com/99designs/gqlgen/codegen/complexity.go
new file mode 100644
index 00000000..66d21a84
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/complexity.go
@@ -0,0 +1,11 @@
+package codegen
+
+func (o *Object) UniqueFields() map[string]*Field {
+ m := map[string]*Field{}
+
+ for _, f := range o.Fields {
+ m[f.GoFieldName] = f
+ }
+
+ return m
+}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/config.go b/vendor/github.com/99designs/gqlgen/codegen/config.go
deleted file mode 100644
index f9df24fb..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/config.go
+++ /dev/null
@@ -1,273 +0,0 @@
-package codegen
-
-import (
- "fmt"
- "go/build"
- "io/ioutil"
- "os"
- "path/filepath"
- "sort"
- "strings"
-
- "github.com/99designs/gqlgen/internal/gopath"
- "github.com/pkg/errors"
- "github.com/vektah/gqlparser/ast"
- "gopkg.in/yaml.v2"
-)
-
-var cfgFilenames = []string{".gqlgen.yml", "gqlgen.yml", "gqlgen.yaml"}
-
-// DefaultConfig creates a copy of the default config
-func DefaultConfig() *Config {
- return &Config{
- SchemaFilename: SchemaFilenames{"schema.graphql"},
- SchemaStr: map[string]string{},
- Model: PackageConfig{Filename: "models_gen.go"},
- Exec: PackageConfig{Filename: "generated.go"},
- }
-}
-
-// LoadConfigFromDefaultLocations looks for a config file in the current directory, and all parent directories
-// walking up the tree. The closest config file will be returned.
-func LoadConfigFromDefaultLocations() (*Config, error) {
- cfgFile, err := findCfg()
- if err != nil {
- return nil, err
- }
-
- err = os.Chdir(filepath.Dir(cfgFile))
- if err != nil {
- return nil, errors.Wrap(err, "unable to enter config dir")
- }
- return LoadConfig(cfgFile)
-}
-
-// LoadConfig reads the gqlgen.yml config file
-func LoadConfig(filename string) (*Config, error) {
- config := DefaultConfig()
-
- b, err := ioutil.ReadFile(filename)
- if err != nil {
- return nil, errors.Wrap(err, "unable to read config")
- }
-
- if err := yaml.UnmarshalStrict(b, config); err != nil {
- return nil, errors.Wrap(err, "unable to parse config")
- }
-
- preGlobbing := config.SchemaFilename
- config.SchemaFilename = SchemaFilenames{}
- for _, f := range preGlobbing {
- matches, err := filepath.Glob(f)
- if err != nil {
- return nil, errors.Wrapf(err, "failed to glob schema filename %s", f)
- }
-
- for _, m := range matches {
- if config.SchemaFilename.Has(m) {
- continue
- }
- config.SchemaFilename = append(config.SchemaFilename, m)
- }
- }
-
- config.FilePath = filename
- config.SchemaStr = map[string]string{}
-
- return config, nil
-}
-
-type Config struct {
- SchemaFilename SchemaFilenames `yaml:"schema,omitempty"`
- SchemaStr map[string]string `yaml:"-"`
- Exec PackageConfig `yaml:"exec"`
- Model PackageConfig `yaml:"model"`
- Resolver PackageConfig `yaml:"resolver,omitempty"`
- Models TypeMap `yaml:"models,omitempty"`
- StructTag string `yaml:"struct_tag,omitempty"`
-
- FilePath string `yaml:"-"`
-
- schema *ast.Schema `yaml:"-"`
-}
-
-type PackageConfig struct {
- Filename string `yaml:"filename,omitempty"`
- Package string `yaml:"package,omitempty"`
- Type string `yaml:"type,omitempty"`
-}
-
-type TypeMapEntry struct {
- Model string `yaml:"model"`
- Fields map[string]TypeMapField `yaml:"fields,omitempty"`
-}
-
-type TypeMapField struct {
- Resolver bool `yaml:"resolver"`
- FieldName string `yaml:"fieldName"`
-}
-
-type SchemaFilenames []string
-
-func (a *SchemaFilenames) UnmarshalYAML(unmarshal func(interface{}) error) error {
- var single string
- err := unmarshal(&single)
- if err == nil {
- *a = []string{single}
- return nil
- }
-
- var multi []string
- err = unmarshal(&multi)
- if err != nil {
- return err
- }
-
- *a = multi
- return nil
-}
-
-func (a SchemaFilenames) Has(file string) bool {
- for _, existing := range a {
- if existing == file {
- return true
- }
- }
- return false
-}
-
-func (c *PackageConfig) normalize() error {
- if c.Filename == "" {
- return errors.New("Filename is required")
- }
- c.Filename = abs(c.Filename)
- // If Package is not set, first attempt to load the package at the output dir. If that fails
- // fallback to just the base dir name of the output filename.
- if c.Package == "" {
- cwd, _ := os.Getwd()
- pkg, _ := build.Default.Import(c.ImportPath(), cwd, 0)
- if pkg.Name != "" {
- c.Package = pkg.Name
- } else {
- c.Package = filepath.Base(c.Dir())
- }
- }
- c.Package = sanitizePackageName(c.Package)
- return nil
-}
-
-func (c *PackageConfig) ImportPath() string {
- return gopath.MustDir2Import(c.Dir())
-}
-
-func (c *PackageConfig) Dir() string {
- return filepath.Dir(c.Filename)
-}
-
-func (c *PackageConfig) Check() error {
- if strings.ContainsAny(c.Package, "./\\") {
- return fmt.Errorf("package should be the output package name only, do not include the output filename")
- }
- if c.Filename != "" && !strings.HasSuffix(c.Filename, ".go") {
- return fmt.Errorf("filename should be path to a go source file")
- }
- return nil
-}
-
-func (c *PackageConfig) IsDefined() bool {
- return c.Filename != ""
-}
-
-func (cfg *Config) Check() error {
- if err := cfg.Models.Check(); err != nil {
- return errors.Wrap(err, "config.models")
- }
- if err := cfg.Exec.Check(); err != nil {
- return errors.Wrap(err, "config.exec")
- }
- if err := cfg.Model.Check(); err != nil {
- return errors.Wrap(err, "config.model")
- }
- if err := cfg.Resolver.Check(); err != nil {
- return errors.Wrap(err, "config.resolver")
- }
- return nil
-}
-
-type TypeMap map[string]TypeMapEntry
-
-func (tm TypeMap) Exists(typeName string) bool {
- _, ok := tm[typeName]
- return ok
-}
-
-func (tm TypeMap) Check() error {
- for typeName, entry := range tm {
- if strings.LastIndex(entry.Model, ".") < strings.LastIndex(entry.Model, "/") {
- return fmt.Errorf("model %s: invalid type specifier \"%s\" - you need to specify a struct to map to", typeName, entry.Model)
- }
- }
- return nil
-}
-
-func (tm TypeMap) referencedPackages() []string {
- var pkgs []string
-
- for _, typ := range tm {
- if typ.Model == "map[string]interface{}" {
- continue
- }
- pkg, _ := pkgAndType(typ.Model)
- if pkg == "" || inStrSlice(pkgs, pkg) {
- continue
- }
- pkgs = append(pkgs, pkg)
- }
-
- sort.Slice(pkgs, func(i, j int) bool {
- return pkgs[i] > pkgs[j]
- })
- return pkgs
-}
-
-func inStrSlice(haystack []string, needle string) bool {
- for _, v := range haystack {
- if needle == v {
- return true
- }
- }
-
- return false
-}
-
-// findCfg searches for the config file in this directory and all parents up the tree
-// looking for the closest match
-func findCfg() (string, error) {
- dir, err := os.Getwd()
- if err != nil {
- return "", errors.Wrap(err, "unable to get working dir to findCfg")
- }
-
- cfg := findCfgInDir(dir)
-
- for cfg == "" && dir != filepath.Dir(dir) {
- dir = filepath.Dir(dir)
- cfg = findCfgInDir(dir)
- }
-
- if cfg == "" {
- return "", os.ErrNotExist
- }
-
- return cfg, nil
-}
-
-func findCfgInDir(dir string) string {
- for _, cfgName := range cfgFilenames {
- path := filepath.Join(dir, cfgName)
- if _, err := os.Stat(path); err == nil {
- return path
- }
- }
- return ""
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/config/binder.go b/vendor/github.com/99designs/gqlgen/codegen/config/binder.go
new file mode 100644
index 00000000..f3956387
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/config/binder.go
@@ -0,0 +1,451 @@
+package config
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/99designs/gqlgen/internal/code"
+ "github.com/pkg/errors"
+ "github.com/vektah/gqlparser/ast"
+ "golang.org/x/tools/go/packages"
+)
+
+// Binder connects graphql types to golang types using static analysis
+type Binder struct {
+ pkgs []*packages.Package
+ schema *ast.Schema
+ cfg *Config
+ References []*TypeReference
+}
+
+func (c *Config) NewBinder(s *ast.Schema) (*Binder, error) {
+ pkgs, err := packages.Load(&packages.Config{Mode: packages.LoadTypes | packages.LoadSyntax}, c.Models.ReferencedPackages()...)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, p := range pkgs {
+ for _, e := range p.Errors {
+ if e.Kind == packages.ListError {
+ return nil, p.Errors[0]
+ }
+ }
+ }
+
+ return &Binder{
+ pkgs: pkgs,
+ schema: s,
+ cfg: c,
+ }, nil
+}
+
+func (b *Binder) TypePosition(typ types.Type) token.Position {
+ named, isNamed := typ.(*types.Named)
+ if !isNamed {
+ return token.Position{
+ Filename: "unknown",
+ }
+ }
+
+ return b.ObjectPosition(named.Obj())
+}
+
+func (b *Binder) ObjectPosition(typ types.Object) token.Position {
+ if typ == nil {
+ return token.Position{
+ Filename: "unknown",
+ }
+ }
+ pkg := b.getPkg(typ.Pkg().Path())
+ return pkg.Fset.Position(typ.Pos())
+}
+
+func (b *Binder) FindType(pkgName string, typeName string) (types.Type, error) {
+ obj, err := b.FindObject(pkgName, typeName)
+ if err != nil {
+ return nil, err
+ }
+
+ if fun, isFunc := obj.(*types.Func); isFunc {
+ return fun.Type().(*types.Signature).Params().At(0).Type(), nil
+ }
+ return obj.Type(), nil
+}
+
+func (b *Binder) getPkg(find string) *packages.Package {
+ for _, p := range b.pkgs {
+ if code.NormalizeVendor(find) == code.NormalizeVendor(p.PkgPath) {
+ return p
+ }
+ }
+ return nil
+}
+
+var MapType = types.NewMap(types.Typ[types.String], types.NewInterfaceType(nil, nil).Complete())
+var InterfaceType = types.NewInterfaceType(nil, nil)
+
+func (b *Binder) DefaultUserObject(name string) (types.Type, error) {
+ models := b.cfg.Models[name].Model
+ if len(models) == 0 {
+ return nil, fmt.Errorf("%s not found in typemap", name)
+ }
+
+ if models[0] == "map[string]interface{}" {
+ return MapType, nil
+ }
+
+ if models[0] == "interface{}" {
+ return InterfaceType, nil
+ }
+
+ pkgName, typeName := code.PkgAndType(models[0])
+ if pkgName == "" {
+ return nil, fmt.Errorf("missing package name for %s", name)
+ }
+
+ obj, err := b.FindObject(pkgName, typeName)
+ if err != nil {
+ return nil, err
+ }
+
+ return obj.Type(), nil
+}
+
+func (b *Binder) FindObject(pkgName string, typeName string) (types.Object, error) {
+ if pkgName == "" {
+ return nil, fmt.Errorf("package cannot be nil")
+ }
+ fullName := typeName
+ if pkgName != "" {
+ fullName = pkgName + "." + typeName
+ }
+
+ pkg := b.getPkg(pkgName)
+ if pkg == nil {
+ return nil, errors.Errorf("required package was not loaded: %s", fullName)
+ }
+
+ // function based marshalers take precedence
+ for astNode, def := range pkg.TypesInfo.Defs {
+ // only look at defs in the top scope
+ if def == nil || def.Parent() == nil || def.Parent() != pkg.Types.Scope() {
+ continue
+ }
+
+ if astNode.Name == "Marshal"+typeName {
+ return def, nil
+ }
+ }
+
+ // then look for types directly
+ for astNode, def := range pkg.TypesInfo.Defs {
+ // only look at defs in the top scope
+ if def == nil || def.Parent() == nil || def.Parent() != pkg.Types.Scope() {
+ continue
+ }
+
+ if astNode.Name == typeName {
+ return def, nil
+ }
+ }
+
+ return nil, errors.Errorf("unable to find type %s\n", fullName)
+}
+
+func (b *Binder) PointerTo(ref *TypeReference) *TypeReference {
+ newRef := &TypeReference{
+ GO: types.NewPointer(ref.GO),
+ GQL: ref.GQL,
+ CastType: ref.CastType,
+ Definition: ref.Definition,
+ Unmarshaler: ref.Unmarshaler,
+ Marshaler: ref.Marshaler,
+ IsMarshaler: ref.IsMarshaler,
+ }
+
+ b.References = append(b.References, newRef)
+ return newRef
+}
+
+// TypeReference is used by args and field types. The Definition can refer to both input and output types.
+type TypeReference struct {
+ Definition *ast.Definition
+ GQL *ast.Type
+ GO types.Type
+ CastType types.Type // Before calling marshalling functions cast from/to this base type
+ Marshaler *types.Func // When using external marshalling functions this will point to the Marshal function
+ Unmarshaler *types.Func // When using external marshalling functions this will point to the Unmarshal function
+ IsMarshaler bool // Does the type implement graphql.Marshaler and graphql.Unmarshaler
+}
+
+func (ref *TypeReference) Elem() *TypeReference {
+ if p, isPtr := ref.GO.(*types.Pointer); isPtr {
+ return &TypeReference{
+ GO: p.Elem(),
+ GQL: ref.GQL,
+ CastType: ref.CastType,
+ Definition: ref.Definition,
+ Unmarshaler: ref.Unmarshaler,
+ Marshaler: ref.Marshaler,
+ IsMarshaler: ref.IsMarshaler,
+ }
+ }
+
+ if ref.IsSlice() {
+ return &TypeReference{
+ GO: ref.GO.(*types.Slice).Elem(),
+ GQL: ref.GQL.Elem,
+ CastType: ref.CastType,
+ Definition: ref.Definition,
+ Unmarshaler: ref.Unmarshaler,
+ Marshaler: ref.Marshaler,
+ IsMarshaler: ref.IsMarshaler,
+ }
+ }
+ return nil
+}
+
+func (t *TypeReference) IsPtr() bool {
+ _, isPtr := t.GO.(*types.Pointer)
+ return isPtr
+}
+
+func (t *TypeReference) IsNilable() bool {
+ _, isPtr := t.GO.(*types.Pointer)
+ _, isMap := t.GO.(*types.Map)
+ _, isInterface := t.GO.(*types.Interface)
+ return isPtr || isMap || isInterface
+}
+
+func (t *TypeReference) IsSlice() bool {
+ _, isSlice := t.GO.(*types.Slice)
+ return t.GQL.Elem != nil && isSlice
+}
+
+func (t *TypeReference) IsNamed() bool {
+ _, isNamed := t.GO.(*types.Named)
+ return isNamed
+}
+
+func (t *TypeReference) IsStruct() bool {
+ _, isStruct := t.GO.Underlying().(*types.Struct)
+ return isStruct
+}
+
+func (t *TypeReference) IsScalar() bool {
+ return t.Definition.Kind == ast.Scalar
+}
+
+func (t *TypeReference) HasIsZero() bool {
+ it := t.GO
+ if ptr, isPtr := it.(*types.Pointer); isPtr {
+ it = ptr.Elem()
+ }
+ namedType, ok := it.(*types.Named)
+ if !ok {
+ return false
+ }
+
+ for i := 0; i < namedType.NumMethods(); i++ {
+ switch namedType.Method(i).Name() {
+ case "IsZero":
+ return true
+ }
+ }
+ return false
+}
+
+func (t *TypeReference) UniquenessKey() string {
+ var nullability = "O"
+ if t.GQL.NonNull {
+ nullability = "N"
+ }
+
+ return nullability + t.Definition.Name + "2" + templates.TypeIdentifier(t.GO)
+}
+
+func (t *TypeReference) MarshalFunc() string {
+ if t.Definition == nil {
+ panic(errors.New("Definition missing for " + t.GQL.Name()))
+ }
+
+ if t.Definition.Kind == ast.InputObject {
+ return ""
+ }
+
+ return "marshal" + t.UniquenessKey()
+}
+
+func (t *TypeReference) UnmarshalFunc() string {
+ if t.Definition == nil {
+ panic(errors.New("Definition missing for " + t.GQL.Name()))
+ }
+
+ if !t.Definition.IsInputType() {
+ return ""
+ }
+
+ return "unmarshal" + t.UniquenessKey()
+}
+
+func (b *Binder) PushRef(ret *TypeReference) {
+ b.References = append(b.References, ret)
+}
+
+func isMap(t types.Type) bool {
+ if t == nil {
+ return true
+ }
+ _, ok := t.(*types.Map)
+ return ok
+}
+
+func isIntf(t types.Type) bool {
+ if t == nil {
+ return true
+ }
+ _, ok := t.(*types.Interface)
+ return ok
+}
+
+func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret *TypeReference, err error) {
+ var pkgName, typeName string
+ def := b.schema.Types[schemaType.Name()]
+ defer func() {
+ if err == nil && ret != nil {
+ b.PushRef(ret)
+ }
+ }()
+
+ if len(b.cfg.Models[schemaType.Name()].Model) == 0 {
+ return nil, fmt.Errorf("%s was not found", schemaType.Name())
+ }
+
+ for _, model := range b.cfg.Models[schemaType.Name()].Model {
+ if model == "map[string]interface{}" {
+ if !isMap(bindTarget) {
+ continue
+ }
+ return &TypeReference{
+ Definition: def,
+ GQL: schemaType,
+ GO: MapType,
+ }, nil
+ }
+
+ if model == "interface{}" {
+ if !isIntf(bindTarget) {
+ continue
+ }
+ return &TypeReference{
+ Definition: def,
+ GQL: schemaType,
+ GO: InterfaceType,
+ }, nil
+ }
+
+ pkgName, typeName = code.PkgAndType(model)
+ if pkgName == "" {
+ return nil, fmt.Errorf("missing package name for %s", schemaType.Name())
+ }
+
+ ref := &TypeReference{
+ Definition: def,
+ GQL: schemaType,
+ }
+
+ obj, err := b.FindObject(pkgName, typeName)
+ if err != nil {
+ return nil, err
+ }
+
+ if fun, isFunc := obj.(*types.Func); isFunc {
+ ref.GO = fun.Type().(*types.Signature).Params().At(0).Type()
+ ref.Marshaler = fun
+ ref.Unmarshaler = types.NewFunc(0, fun.Pkg(), "Unmarshal"+typeName, nil)
+ } else if hasMethod(obj.Type(), "MarshalGQL") && hasMethod(obj.Type(), "UnmarshalGQL") {
+ ref.GO = obj.Type()
+ ref.IsMarshaler = true
+ } else if underlying := basicUnderlying(obj.Type()); underlying != nil && underlying.Kind() == types.String {
+ // Special case for named types wrapping strings. Used by default enum implementations.
+
+ ref.GO = obj.Type()
+ ref.CastType = underlying
+
+ underlyingRef, err := b.TypeReference(&ast.Type{NamedType: "String"}, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ ref.Marshaler = underlyingRef.Marshaler
+ ref.Unmarshaler = underlyingRef.Unmarshaler
+ } else {
+ ref.GO = obj.Type()
+ }
+
+ ref.GO = b.CopyModifiersFromAst(schemaType, ref.GO)
+
+ if bindTarget != nil {
+ if err = code.CompatibleTypes(ref.GO, bindTarget); err != nil {
+ continue
+ }
+ ref.GO = bindTarget
+ }
+
+ return ref, nil
+ }
+
+ return nil, fmt.Errorf("%s has type compatible with %s", schemaType.Name(), bindTarget.String())
+}
+
+func (b *Binder) CopyModifiersFromAst(t *ast.Type, base types.Type) types.Type {
+ if t.Elem != nil {
+ return types.NewSlice(b.CopyModifiersFromAst(t.Elem, base))
+ }
+
+ var isInterface bool
+ if named, ok := base.(*types.Named); ok {
+ _, isInterface = named.Underlying().(*types.Interface)
+ }
+
+ if !isInterface && !t.NonNull {
+ return types.NewPointer(base)
+ }
+
+ return base
+}
+
+func hasMethod(it types.Type, name string) bool {
+ if ptr, isPtr := it.(*types.Pointer); isPtr {
+ it = ptr.Elem()
+ }
+ namedType, ok := it.(*types.Named)
+ if !ok {
+ return false
+ }
+
+ for i := 0; i < namedType.NumMethods(); i++ {
+ if namedType.Method(i).Name() == name {
+ return true
+ }
+ }
+ return false
+}
+
+func basicUnderlying(it types.Type) *types.Basic {
+ if ptr, isPtr := it.(*types.Pointer); isPtr {
+ it = ptr.Elem()
+ }
+ namedType, ok := it.(*types.Named)
+ if !ok {
+ return nil
+ }
+
+ if basic, ok := namedType.Underlying().(*types.Basic); ok {
+ return basic
+ }
+
+ return nil
+}
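
The binder recognises three binding styles: package-level Marshal*/Unmarshal* functions, types that carry their own MarshalGQL/UnmarshalGQL methods (IsMarshaler), and named string types used for enums (CastType). A sketch of the second style, with a hypothetical Duration scalar whose declared methods hasMethod above would detect:

    package main

    import (
        "fmt"
        "io"
        "os"
        "strconv"
    )

    // Duration declares both MarshalGQL and UnmarshalGQL, so the binder marks
    // its TypeReference with IsMarshaler instead of looking for package-level
    // MarshalDuration/UnmarshalDuration functions.
    type Duration int64

    func (d Duration) MarshalGQL(w io.Writer) {
        io.WriteString(w, strconv.Quote(strconv.FormatInt(int64(d), 10)))
    }

    func (d *Duration) UnmarshalGQL(v interface{}) error {
        s, ok := v.(string)
        if !ok {
            return fmt.Errorf("Duration must be a string, got %T", v)
        }
        n, err := strconv.ParseInt(s, 10, 64)
        if err != nil {
            return err
        }
        *d = Duration(n)
        return nil
    }

    func main() {
        var d Duration
        if err := d.UnmarshalGQL("42"); err != nil {
            panic(err)
        }
        d.MarshalGQL(os.Stdout) // writes "42"
        fmt.Println()
    }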
diff --git a/vendor/github.com/99designs/gqlgen/codegen/config/config.go b/vendor/github.com/99designs/gqlgen/codegen/config/config.go
new file mode 100644
index 00000000..0c72420e
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/config/config.go
@@ -0,0 +1,408 @@
+package config
+
+import (
+ "fmt"
+ "go/types"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/99designs/gqlgen/internal/code"
+ "github.com/pkg/errors"
+ "github.com/vektah/gqlparser"
+ "github.com/vektah/gqlparser/ast"
+ yaml "gopkg.in/yaml.v2"
+)
+
+type Config struct {
+ SchemaFilename StringList `yaml:"schema,omitempty"`
+ Exec PackageConfig `yaml:"exec"`
+ Model PackageConfig `yaml:"model"`
+ Resolver PackageConfig `yaml:"resolver,omitempty"`
+ Models TypeMap `yaml:"models,omitempty"`
+ StructTag string `yaml:"struct_tag,omitempty"`
+}
+
+var cfgFilenames = []string{".gqlgen.yml", "gqlgen.yml", "gqlgen.yaml"}
+
+// DefaultConfig creates a copy of the default config
+func DefaultConfig() *Config {
+ return &Config{
+ SchemaFilename: StringList{"schema.graphql"},
+ Model: PackageConfig{Filename: "models_gen.go"},
+ Exec: PackageConfig{Filename: "generated.go"},
+ }
+}
+
+// LoadConfigFromDefaultLocations looks for a config file in the current directory, and all parent directories
+// walking up the tree. The closest config file will be returned.
+func LoadConfigFromDefaultLocations() (*Config, error) {
+ cfgFile, err := findCfg()
+ if err != nil {
+ return nil, err
+ }
+
+ err = os.Chdir(filepath.Dir(cfgFile))
+ if err != nil {
+ return nil, errors.Wrap(err, "unable to enter config dir")
+ }
+ return LoadConfig(cfgFile)
+}
+
+// LoadConfig reads the gqlgen.yml config file
+func LoadConfig(filename string) (*Config, error) {
+ config := DefaultConfig()
+
+ b, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return nil, errors.Wrap(err, "unable to read config")
+ }
+
+ if err := yaml.UnmarshalStrict(b, config); err != nil {
+ return nil, errors.Wrap(err, "unable to parse config")
+ }
+
+ preGlobbing := config.SchemaFilename
+ config.SchemaFilename = StringList{}
+ for _, f := range preGlobbing {
+ matches, err := filepath.Glob(f)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to glob schema filename %s", f)
+ }
+
+ for _, m := range matches {
+ if config.SchemaFilename.Has(m) {
+ continue
+ }
+ config.SchemaFilename = append(config.SchemaFilename, m)
+ }
+ }
+
+ return config, nil
+}
+
+type PackageConfig struct {
+ Filename string `yaml:"filename,omitempty"`
+ Package string `yaml:"package,omitempty"`
+ Type string `yaml:"type,omitempty"`
+}
+
+type TypeMapEntry struct {
+ Model StringList `yaml:"model"`
+ Fields map[string]TypeMapField `yaml:"fields,omitempty"`
+}
+
+type TypeMapField struct {
+ Resolver bool `yaml:"resolver"`
+ FieldName string `yaml:"fieldName"`
+}
+
+type StringList []string
+
+func (a *StringList) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ var single string
+ err := unmarshal(&single)
+ if err == nil {
+ *a = []string{single}
+ return nil
+ }
+
+ var multi []string
+ err = unmarshal(&multi)
+ if err != nil {
+ return err
+ }
+
+ *a = multi
+ return nil
+}
+
+func (a StringList) Has(file string) bool {
+ for _, existing := range a {
+ if existing == file {
+ return true
+ }
+ }
+ return false
+}
+
+func (c *PackageConfig) normalize() error {
+ if c.Filename == "" {
+ return errors.New("Filename is required")
+ }
+ c.Filename = abs(c.Filename)
+ // If Package is not set, infer it from the package at the output dir,
+ // falling back to the base directory name of the output filename.
+ if c.Package == "" {
+ c.Package = code.NameForPackage(c.ImportPath())
+ }
+
+ return nil
+}
+
+func (c *PackageConfig) ImportPath() string {
+ return code.ImportPathForDir(c.Dir())
+}
+
+func (c *PackageConfig) Dir() string {
+ return filepath.Dir(c.Filename)
+}
+
+func (c *PackageConfig) Check() error {
+ if strings.ContainsAny(c.Package, "./\\") {
+ return fmt.Errorf("package should be the output package name only, do not include the output filename")
+ }
+ if c.Filename != "" && !strings.HasSuffix(c.Filename, ".go") {
+ return fmt.Errorf("filename should be path to a go source file")
+ }
+
+ return c.normalize()
+}
+
+func (c *PackageConfig) Pkg() *types.Package {
+ return types.NewPackage(c.ImportPath(), c.Dir())
+}
+
+func (c *PackageConfig) IsDefined() bool {
+ return c.Filename != ""
+}
+
+func (c *Config) Check() error {
+ if err := c.Models.Check(); err != nil {
+ return errors.Wrap(err, "config.models")
+ }
+ if err := c.Exec.Check(); err != nil {
+ return errors.Wrap(err, "config.exec")
+ }
+ if err := c.Model.Check(); err != nil {
+ return errors.Wrap(err, "config.model")
+ }
+ if c.Resolver.IsDefined() {
+ if err := c.Resolver.Check(); err != nil {
+ return errors.Wrap(err, "config.resolver")
+ }
+ }
+
+ // Check that filenames are unique and that package configs sharing a
+ // directory use the same package name.
+ packageConfigList := []PackageConfig{
+ c.Model,
+ c.Exec,
+ c.Resolver,
+ }
+ filesMap := make(map[string]bool)
+ pkgConfigsByDir := make(map[string]PackageConfig)
+ for _, current := range packageConfigList {
+ _, fileFound := filesMap[current.Filename]
+ if fileFound {
+ return fmt.Errorf("filename %s defined more than once", current.Filename)
+ }
+ filesMap[current.Filename] = true
+ previous, inSameDir := pkgConfigsByDir[current.Dir()]
+ if inSameDir && current.Package != previous.Package {
+ return fmt.Errorf("filenames %s and %s are in the same directory but have different package definitions", stripPath(current.Filename), stripPath(previous.Filename))
+ }
+ pkgConfigsByDir[current.Dir()] = current
+ }
+
+ return c.normalize()
+}
+
+func stripPath(path string) string {
+ return filepath.Base(path)
+}
+
+type TypeMap map[string]TypeMapEntry
+
+func (tm TypeMap) Exists(typeName string) bool {
+ _, ok := tm[typeName]
+ return ok
+}
+
+func (tm TypeMap) UserDefined(typeName string) bool {
+ m, ok := tm[typeName]
+ return ok && len(m.Model) > 0
+}
+
+func (tm TypeMap) Check() error {
+ for typeName, entry := range tm {
+ for _, model := range entry.Model {
+ if strings.LastIndex(model, ".") < strings.LastIndex(model, "/") {
+ return fmt.Errorf("model %s: invalid type specifier \"%s\" - you need to specify a struct to map to", typeName, entry.Model)
+ }
+ }
+ }
+ return nil
+}
+
+func (tm TypeMap) ReferencedPackages() []string {
+ var pkgs []string
+
+ for _, typ := range tm {
+ for _, model := range typ.Model {
+ if model == "map[string]interface{}" || model == "interface{}" {
+ continue
+ }
+ pkg, _ := code.PkgAndType(model)
+ if pkg == "" || inStrSlice(pkgs, pkg) {
+ continue
+ }
+ pkgs = append(pkgs, code.QualifyPackagePath(pkg))
+ }
+ }
+
+ sort.Slice(pkgs, func(i, j int) bool {
+ return pkgs[i] > pkgs[j]
+ })
+ return pkgs
+}
+
+func (tm TypeMap) Add(Name string, goType string) {
+ modelCfg := tm[Name]
+ modelCfg.Model = append(modelCfg.Model, goType)
+ tm[Name] = modelCfg
+}
+
+func inStrSlice(haystack []string, needle string) bool {
+ for _, v := range haystack {
+ if needle == v {
+ return true
+ }
+ }
+
+ return false
+}
+
+// findCfg searches for the config file in this directory and all parents up the tree
+// looking for the closest match
+func findCfg() (string, error) {
+ dir, err := os.Getwd()
+ if err != nil {
+ return "", errors.Wrap(err, "unable to get working dir to findCfg")
+ }
+
+ cfg := findCfgInDir(dir)
+
+ for cfg == "" && dir != filepath.Dir(dir) {
+ dir = filepath.Dir(dir)
+ cfg = findCfgInDir(dir)
+ }
+
+ if cfg == "" {
+ return "", os.ErrNotExist
+ }
+
+ return cfg, nil
+}
+
+func findCfgInDir(dir string) string {
+ for _, cfgName := range cfgFilenames {
+ path := filepath.Join(dir, cfgName)
+ if _, err := os.Stat(path); err == nil {
+ return path
+ }
+ }
+ return ""
+}
+
+func (c *Config) normalize() error {
+ if err := c.Model.normalize(); err != nil {
+ return errors.Wrap(err, "model")
+ }
+
+ if err := c.Exec.normalize(); err != nil {
+ return errors.Wrap(err, "exec")
+ }
+
+ if c.Resolver.IsDefined() {
+ if err := c.Resolver.normalize(); err != nil {
+ return errors.Wrap(err, "resolver")
+ }
+ }
+
+ if c.Models == nil {
+ c.Models = TypeMap{}
+ }
+
+ return nil
+}
+
+func (c *Config) InjectBuiltins(s *ast.Schema) {
+ builtins := TypeMap{
+ "__Directive": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Directive"}},
+ "__DirectiveLocation": {Model: StringList{"github.com/99designs/gqlgen/graphql.String"}},
+ "__Type": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Type"}},
+ "__TypeKind": {Model: StringList{"github.com/99designs/gqlgen/graphql.String"}},
+ "__Field": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Field"}},
+ "__EnumValue": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.EnumValue"}},
+ "__InputValue": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.InputValue"}},
+ "__Schema": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Schema"}},
+ "Float": {Model: StringList{"github.com/99designs/gqlgen/graphql.Float"}},
+ "String": {Model: StringList{"github.com/99designs/gqlgen/graphql.String"}},
+ "Boolean": {Model: StringList{"github.com/99designs/gqlgen/graphql.Boolean"}},
+ "Int": {Model: StringList{
+ "github.com/99designs/gqlgen/graphql.Int",
+ "github.com/99designs/gqlgen/graphql.Int32",
+ "github.com/99designs/gqlgen/graphql.Int64",
+ }},
+ "ID": {
+ Model: StringList{
+ "github.com/99designs/gqlgen/graphql.ID",
+ "github.com/99designs/gqlgen/graphql.IntID",
+ },
+ },
+ }
+
+ for typeName, entry := range builtins {
+ if !c.Models.Exists(typeName) {
+ c.Models[typeName] = entry
+ }
+ }
+
+ // These are additional types that are injected if defined in the schema as scalars.
+ extraBuiltins := TypeMap{
+ "Time": {Model: StringList{"github.com/99designs/gqlgen/graphql.Time"}},
+ "Map": {Model: StringList{"github.com/99designs/gqlgen/graphql.Map"}},
+ }
+
+ for typeName, entry := range extraBuiltins {
+ if t, ok := s.Types[typeName]; !c.Models.Exists(typeName) && ok && t.Kind == ast.Scalar {
+ c.Models[typeName] = entry
+ }
+ }
+}
+
+func (c *Config) LoadSchema() (*ast.Schema, map[string]string, error) {
+ schemaStrings := map[string]string{}
+
+ var sources []*ast.Source
+
+ for _, filename := range c.SchemaFilename {
+ filename = filepath.ToSlash(filename)
+ var err error
+ var schemaRaw []byte
+ schemaRaw, err = ioutil.ReadFile(filename)
+ if err != nil {
+ fmt.Fprintln(os.Stderr, "unable to open schema: "+err.Error())
+ os.Exit(1)
+ }
+ schemaStrings[filename] = string(schemaRaw)
+ sources = append(sources, &ast.Source{Name: filename, Input: schemaStrings[filename]})
+ }
+
+ schema, err := gqlparser.LoadSchema(sources...)
+ if err != nil {
+ return nil, nil, err
+ }
+ return schema, schemaStrings, nil
+}
+
+func abs(path string) string {
+ absPath, err := filepath.Abs(path)
+ if err != nil {
+ panic(err)
+ }
+ return filepath.ToSlash(absPath)
+}
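
StringList accepts either a single string or a sequence for the schema key, so single-file configs keep working next to globbed multi-file ones. Both YAML shapes in a small check (the filenames are illustrative):

    package main

    import (
        "fmt"

        "github.com/99designs/gqlgen/codegen/config"
        yaml "gopkg.in/yaml.v2"
    )

    func main() {
        for _, doc := range []string{
            "schema: schema.graphql",
            "schema:\n  - user.graphql\n  - bug.graphql",
        } {
            c := config.DefaultConfig()
            if err := yaml.UnmarshalStrict([]byte(doc), c); err != nil {
                panic(err)
            }
            fmt.Println(c.SchemaFilename) // both forms land in the same StringList
        }
    }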
diff --git a/vendor/github.com/99designs/gqlgen/codegen/data.go b/vendor/github.com/99designs/gqlgen/codegen/data.go
new file mode 100644
index 00000000..f2ea70b4
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/data.go
@@ -0,0 +1,168 @@
+package codegen
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/99designs/gqlgen/codegen/config"
+ "github.com/pkg/errors"
+ "github.com/vektah/gqlparser/ast"
+)
+
+// Data is a unified model of the code to be generated. Plugins may modify this structure to do things like implement
+// resolvers or directives automatically (e.g. gRPC, validation).
+type Data struct {
+ Config *config.Config
+ Schema *ast.Schema
+ SchemaStr map[string]string
+ Directives map[string]*Directive
+ Objects Objects
+ Inputs Objects
+ Interfaces map[string]*Interface
+ ReferencedTypes map[string]*config.TypeReference
+ ComplexityRoots map[string]*Object
+
+ QueryRoot *Object
+ MutationRoot *Object
+ SubscriptionRoot *Object
+}
+
+type builder struct {
+ Config *config.Config
+ Schema *ast.Schema
+ SchemaStr map[string]string
+ Binder *config.Binder
+ Directives map[string]*Directive
+}
+
+func BuildData(cfg *config.Config) (*Data, error) {
+ b := builder{
+ Config: cfg,
+ }
+
+ var err error
+ b.Schema, b.SchemaStr, err = cfg.LoadSchema()
+ if err != nil {
+ return nil, err
+ }
+
+ err = cfg.Check()
+ if err != nil {
+ return nil, err
+ }
+
+ cfg.InjectBuiltins(b.Schema)
+
+ b.Binder, err = b.Config.NewBinder(b.Schema)
+ if err != nil {
+ return nil, err
+ }
+
+ b.Directives, err = b.buildDirectives()
+ if err != nil {
+ return nil, err
+ }
+
+ dataDirectives := make(map[string]*Directive)
+ for name, d := range b.Directives {
+ if !d.Builtin {
+ dataDirectives[name] = d
+ }
+ }
+
+ s := Data{
+ Config: cfg,
+ Directives: dataDirectives,
+ Schema: b.Schema,
+ SchemaStr: b.SchemaStr,
+ Interfaces: map[string]*Interface{},
+ }
+
+ for _, schemaType := range b.Schema.Types {
+ switch schemaType.Kind {
+ case ast.Object:
+ obj, err := b.buildObject(schemaType)
+ if err != nil {
+ return nil, errors.Wrap(err, "unable to build object definition")
+ }
+
+ s.Objects = append(s.Objects, obj)
+ case ast.InputObject:
+ input, err := b.buildObject(schemaType)
+ if err != nil {
+ return nil, errors.Wrap(err, "unable to build input definition")
+ }
+
+ s.Inputs = append(s.Inputs, input)
+
+ case ast.Union, ast.Interface:
+ s.Interfaces[schemaType.Name] = b.buildInterface(schemaType)
+ }
+ }
+
+ if s.Schema.Query != nil {
+ s.QueryRoot = s.Objects.ByName(s.Schema.Query.Name)
+ } else {
+ return nil, fmt.Errorf("query entry point missing")
+ }
+
+ if s.Schema.Mutation != nil {
+ s.MutationRoot = s.Objects.ByName(s.Schema.Mutation.Name)
+ }
+
+ if s.Schema.Subscription != nil {
+ s.SubscriptionRoot = s.Objects.ByName(s.Schema.Subscription.Name)
+ }
+
+ if err := b.injectIntrospectionRoots(&s); err != nil {
+ return nil, err
+ }
+
+ s.ReferencedTypes, err = b.buildTypes()
+ if err != nil {
+ return nil, err
+ }
+
+ sort.Slice(s.Objects, func(i, j int) bool {
+ return s.Objects[i].Definition.Name < s.Objects[j].Definition.Name
+ })
+
+ sort.Slice(s.Inputs, func(i, j int) bool {
+ return s.Inputs[i].Definition.Name < s.Inputs[j].Definition.Name
+ })
+
+ return &s, nil
+}
+
+func (b *builder) injectIntrospectionRoots(s *Data) error {
+ obj := s.Objects.ByName(b.Schema.Query.Name)
+ if obj == nil {
+ return fmt.Errorf("root query type must be defined")
+ }
+
+ __type, err := b.buildField(obj, &ast.FieldDefinition{
+ Name: "__type",
+ Type: ast.NamedType("__Type", nil),
+ Arguments: []*ast.ArgumentDefinition{
+ {
+ Name: "name",
+ Type: ast.NonNullNamedType("String", nil),
+ },
+ },
+ })
+ if err != nil {
+ return err
+ }
+
+ __schema, err := b.buildField(obj, &ast.FieldDefinition{
+ Name: "__schema",
+ Type: ast.NamedType("__Schema", nil),
+ })
+ if err != nil {
+ return err
+ }
+
+ obj.Fields = append(obj.Fields, __type, __schema)
+
+ return nil
+}
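As a rough illustration of the kind of pass a plugin could run over the model built here (a sketch; the plugin package name is hypothetical, the fields used are the ones defined in this file):

    package myplugin

    import (
        "fmt"

        "github.com/99designs/gqlgen/codegen"
    )

    // reportResolvers lists every field that BuildData marked as needing a resolver.
    func reportResolvers(data *codegen.Data) {
        for _, obj := range data.Objects {
            for _, field := range obj.Fields {
                if field.IsResolver {
                    fmt.Printf("%s.%s will be declared on %sResolver\n",
                        obj.Definition.Name, field.Name, obj.Definition.Name)
                }
            }
        }
    }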
diff --git a/vendor/github.com/99designs/gqlgen/codegen/directive.go b/vendor/github.com/99designs/gqlgen/codegen/directive.go
index 8017da06..5a27e8ac 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/directive.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/directive.go
@@ -4,11 +4,101 @@ import (
"fmt"
"strconv"
"strings"
+
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/pkg/errors"
+ "github.com/vektah/gqlparser/ast"
)
type Directive struct {
- Name string
- Args []FieldArgument
+ Name string
+ Args []*FieldArgument
+ Builtin bool
+}
+
+func (b *builder) buildDirectives() (map[string]*Directive, error) {
+ directives := make(map[string]*Directive, len(b.Schema.Directives))
+
+ for name, dir := range b.Schema.Directives {
+ if _, ok := directives[name]; ok {
+ return nil, errors.Errorf("directive with name %s already exists", name)
+ }
+
+ var builtin bool
+ if name == "skip" || name == "include" || name == "deprecated" {
+ builtin = true
+ }
+
+ var args []*FieldArgument
+ for _, arg := range dir.Arguments {
+ tr, err := b.Binder.TypeReference(arg.Type, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ newArg := &FieldArgument{
+ ArgumentDefinition: arg,
+ TypeReference: tr,
+ VarName: templates.ToGoPrivate(arg.Name),
+ }
+
+ if arg.DefaultValue != nil {
+ var err error
+ newArg.Default, err = arg.DefaultValue.Value(nil)
+ if err != nil {
+ return nil, errors.Errorf("default value for directive argument %s(%s) is not valid: %s", dir.Name, arg.Name, err.Error())
+ }
+ }
+ args = append(args, newArg)
+ }
+
+ directives[name] = &Directive{
+ Name: name,
+ Args: args,
+ Builtin: builtin,
+ }
+ }
+
+ return directives, nil
+}
+
+func (b *builder) getDirectives(list ast.DirectiveList) ([]*Directive, error) {
+ dirs := make([]*Directive, len(list))
+ for i, d := range list {
+ argValues := make(map[string]interface{}, len(d.Arguments))
+ for _, da := range d.Arguments {
+ val, err := da.Value.Value(nil)
+ if err != nil {
+ return nil, err
+ }
+ argValues[da.Name] = val
+ }
+ def, ok := b.Directives[d.Name]
+ if !ok {
+ return nil, fmt.Errorf("directive %s not found", d.Name)
+ }
+
+ var args []*FieldArgument
+ for _, a := range def.Args {
+ value := a.Default
+ if argValue, ok := argValues[a.Name]; ok {
+ value = argValue
+ }
+ args = append(args, &FieldArgument{
+ ArgumentDefinition: a.ArgumentDefinition,
+ Value: value,
+ VarName: a.VarName,
+ TypeReference: a.TypeReference,
+ })
+ }
+ dirs[i] = &Directive{
+ Name: d.Name,
+ Args: args,
+ }
+
+ }
+
+ return dirs, nil
}
func (d *Directive) ArgsFunc() string {
@@ -23,7 +113,28 @@ func (d *Directive) CallArgs() string {
args := []string{"ctx", "obj", "n"}
for _, arg := range d.Args {
- args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
+ args = append(args, "args["+strconv.Quote(arg.Name)+"].("+templates.CurrentImports.LookupType(arg.TypeReference.GO)+")")
+ }
+
+ return strings.Join(args, ", ")
+}
+
+func (d *Directive) ResolveArgs(obj string, next string) string {
+ args := []string{"ctx", obj, next}
+
+ for _, arg := range d.Args {
+ dArg := "&" + arg.VarName
+ if !arg.TypeReference.IsPtr() {
+ if arg.Value != nil {
+ dArg = templates.Dump(arg.Value)
+ } else {
+ dArg = templates.Dump(arg.Default)
+ }
+ } else if arg.Value == nil && arg.Default == nil {
+ dArg = "nil"
+ }
+
+ args = append(args, dArg)
}
return strings.Join(args, ", ")
@@ -33,7 +144,7 @@ func (d *Directive) Declaration() string {
res := ucFirst(d.Name) + " func(ctx context.Context, obj interface{}, next graphql.Resolver"
for _, arg := range d.Args {
- res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
+ res += fmt.Sprintf(", %s %s", arg.Name, templates.CurrentImports.LookupType(arg.TypeReference.GO))
}
res += ") (res interface{}, err error)"
diff --git a/vendor/github.com/99designs/gqlgen/codegen/directive_build.go b/vendor/github.com/99designs/gqlgen/codegen/directive_build.go
deleted file mode 100644
index af77dc44..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/directive_build.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package codegen
-
-import (
- "sort"
-
- "github.com/pkg/errors"
-)
-
-func (cfg *Config) buildDirectives(types NamedTypes) ([]*Directive, error) {
- var directives []*Directive
-
- for name, dir := range cfg.schema.Directives {
- if name == "skip" || name == "include" || name == "deprecated" {
- continue
- }
-
- var args []FieldArgument
- for _, arg := range dir.Arguments {
- newArg := FieldArgument{
- GQLName: arg.Name,
- Type: types.getType(arg.Type),
- GoVarName: sanitizeArgName(arg.Name),
- }
-
- if !newArg.Type.IsInput && !newArg.Type.IsScalar {
- return nil, errors.Errorf("%s cannot be used as argument of directive %s(%s) only input and scalar types are allowed", arg.Type, dir.Name, arg.Name)
- }
-
- if arg.DefaultValue != nil {
- var err error
- newArg.Default, err = arg.DefaultValue.Value(nil)
- if err != nil {
- return nil, errors.Errorf("default value for directive argument %s(%s) is not valid: %s", dir.Name, arg.Name, err.Error())
- }
- }
- args = append(args, newArg)
- }
-
- directives = append(directives, &Directive{
- Name: name,
- Args: args,
- })
- }
-
- sort.Slice(directives, func(i, j int) bool { return directives[i].Name < directives[j].Name })
-
- return directives, nil
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/enum.go b/vendor/github.com/99designs/gqlgen/codegen/enum.go
deleted file mode 100644
index 7804971c..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/enum.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package codegen
-
-type Enum struct {
- *NamedType
- Description string
- Values []EnumValue
-}
-
-type EnumValue struct {
- Name string
- Description string
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/enum_build.go b/vendor/github.com/99designs/gqlgen/codegen/enum_build.go
deleted file mode 100644
index 457d923f..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/enum_build.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package codegen
-
-import (
- "sort"
- "strings"
-
- "github.com/99designs/gqlgen/codegen/templates"
- "github.com/vektah/gqlparser/ast"
-)
-
-func (cfg *Config) buildEnums(types NamedTypes) []Enum {
- var enums []Enum
-
- for _, typ := range cfg.schema.Types {
- namedType := types[typ.Name]
- if typ.Kind != ast.Enum || strings.HasPrefix(typ.Name, "__") || namedType.IsUserDefined {
- continue
- }
-
- var values []EnumValue
- for _, v := range typ.EnumValues {
- values = append(values, EnumValue{v.Name, v.Description})
- }
-
- enum := Enum{
- NamedType: namedType,
- Values: values,
- Description: typ.Description,
- }
- enum.GoType = templates.ToCamel(enum.GQLType)
- enums = append(enums, enum)
- }
-
- sort.Slice(enums, func(i, j int) bool {
- return enums[i].GQLType < enums[j].GQLType
- })
-
- return enums
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/field.go b/vendor/github.com/99designs/gqlgen/codegen/field.go
new file mode 100644
index 00000000..f5f7b221
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/field.go
@@ -0,0 +1,394 @@
+package codegen
+
+import (
+ "fmt"
+ "go/types"
+ "log"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "github.com/99designs/gqlgen/codegen/config"
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/pkg/errors"
+ "github.com/vektah/gqlparser/ast"
+)
+
+type Field struct {
+ *ast.FieldDefinition
+
+ TypeReference *config.TypeReference
+ GoFieldType GoFieldType // The field type in go, if any
+ GoReceiverName string // The name of method & var receiver in go, if any
+ GoFieldName string // The name of the method or var in go, if any
+ IsResolver bool // Does this field need a resolver
+ Args []*FieldArgument // A list of arguments to be passed to this field
+ MethodHasContext bool // If this is bound to a go method, does the method also take a context
+ NoErr bool // If this is bound to a go method, does that method have an error as the second argument
+ Object *Object // A link back to the parent object
+ Default interface{} // The default value
+ Directives []*Directive
+}
+
+func (b *builder) buildField(obj *Object, field *ast.FieldDefinition) (*Field, error) {
+ dirs, err := b.getDirectives(field.Directives)
+ if err != nil {
+ return nil, err
+ }
+
+ f := Field{
+ FieldDefinition: field,
+ Object: obj,
+ Directives: dirs,
+ GoFieldName: templates.ToGo(field.Name),
+ GoFieldType: GoFieldVariable,
+ GoReceiverName: "obj",
+ }
+
+ if field.DefaultValue != nil {
+ var err error
+ f.Default, err = field.DefaultValue.Value(nil)
+ if err != nil {
+ return nil, errors.Errorf("default value %s is not valid: %s", field.Name, err.Error())
+ }
+ }
+
+ for _, arg := range field.Arguments {
+ newArg, err := b.buildArg(obj, arg)
+ if err != nil {
+ return nil, err
+ }
+ f.Args = append(f.Args, newArg)
+ }
+
+ if err = b.bindField(obj, &f); err != nil {
+ f.IsResolver = true
+ log.Println(err.Error())
+ }
+
+ if f.IsResolver && !f.TypeReference.IsPtr() && f.TypeReference.IsStruct() {
+ f.TypeReference = b.Binder.PointerTo(f.TypeReference)
+ }
+
+ return &f, nil
+}
+
+func (b *builder) bindField(obj *Object, f *Field) error {
+ defer func() {
+ if f.TypeReference == nil {
+ tr, err := b.Binder.TypeReference(f.Type, nil)
+ if err != nil {
+ panic(err)
+ }
+ f.TypeReference = tr
+ }
+ }()
+
+ switch {
+ case f.Name == "__schema":
+ f.GoFieldType = GoFieldMethod
+ f.GoReceiverName = "ec"
+ f.GoFieldName = "introspectSchema"
+ return nil
+ case f.Name == "__type":
+ f.GoFieldType = GoFieldMethod
+ f.GoReceiverName = "ec"
+ f.GoFieldName = "introspectType"
+ return nil
+ case obj.Root:
+ f.IsResolver = true
+ return nil
+ case b.Config.Models[obj.Name].Fields[f.Name].Resolver:
+ f.IsResolver = true
+ return nil
+ case obj.Type == config.MapType:
+ f.GoFieldType = GoFieldMap
+ return nil
+ case b.Config.Models[obj.Name].Fields[f.Name].FieldName != "":
+ f.GoFieldName = b.Config.Models[obj.Name].Fields[f.Name].FieldName
+ }
+
+ target, err := b.findBindTarget(obj.Type.(*types.Named), f.GoFieldName)
+ if err != nil {
+ return err
+ }
+
+ pos := b.Binder.ObjectPosition(target)
+
+ switch target := target.(type) {
+ case nil:
+ objPos := b.Binder.TypePosition(obj.Type)
+ return fmt.Errorf(
+ "%s:%d adding resolver method for %s.%s, nothing matched",
+ objPos.Filename,
+ objPos.Line,
+ obj.Name,
+ f.Name,
+ )
+
+ case *types.Func:
+ sig := target.Type().(*types.Signature)
+ if sig.Results().Len() == 1 {
+ f.NoErr = true
+ } else if sig.Results().Len() != 2 {
+ return fmt.Errorf("method has wrong number of args")
+ }
+ params := sig.Params()
+ // If the first argument is the context, remove it from the comparison and set
+ // the MethodHasContext flag so that the context will be passed to this model's method
+ if params.Len() > 0 && params.At(0).Type().String() == "context.Context" {
+ f.MethodHasContext = true
+ vars := make([]*types.Var, params.Len()-1)
+ for i := 1; i < params.Len(); i++ {
+ vars[i-1] = params.At(i)
+ }
+ params = types.NewTuple(vars...)
+ }
+
+ if err = b.bindArgs(f, params); err != nil {
+ return errors.Wrapf(err, "%s:%d", pos.Filename, pos.Line)
+ }
+
+ result := sig.Results().At(0)
+ tr, err := b.Binder.TypeReference(f.Type, result.Type())
+ if err != nil {
+ return err
+ }
+
+ // success, args and return type match. Bind to method
+ f.GoFieldType = GoFieldMethod
+ f.GoReceiverName = "obj"
+ f.GoFieldName = target.Name()
+ f.TypeReference = tr
+
+ return nil
+
+ case *types.Var:
+ tr, err := b.Binder.TypeReference(f.Type, target.Type())
+ if err != nil {
+ return err
+ }
+
+ // success, bind to var
+ f.GoFieldType = GoFieldVariable
+ f.GoReceiverName = "obj"
+ f.GoFieldName = target.Name()
+ f.TypeReference = tr
+
+ return nil
+ default:
+ panic(fmt.Errorf("unknown bind target %T for %s", target, f.Name))
+ }
+}
+
+// findBindTarget attempts to match the name to a method or struct field with the
+// following priorities:
+// 1. Any exported method with a matching name
+// 2. Any field with a matching struct tag (see config.StructTag)
+// 3. Any field with a matching name
+// 4. Same logic again for embedded fields
+func (b *builder) findBindTarget(named *types.Named, name string) (types.Object, error) {
+ for i := 0; i < named.NumMethods(); i++ {
+ method := named.Method(i)
+ if !method.Exported() {
+ continue
+ }
+
+ if !strings.EqualFold(method.Name(), name) {
+ continue
+ }
+
+ return method, nil
+ }
+
+ strukt, ok := named.Underlying().(*types.Struct)
+ if !ok {
+ return nil, fmt.Errorf("not a struct")
+ }
+ return b.findBindStructTarget(strukt, name)
+}
+
+func (b *builder) findBindStructTarget(strukt *types.Struct, name string) (types.Object, error) {
+ // struct tags have the highest priority
+ if b.Config.StructTag != "" {
+ var foundField *types.Var
+ for i := 0; i < strukt.NumFields(); i++ {
+ field := strukt.Field(i)
+ if !field.Exported() {
+ continue
+ }
+ tags := reflect.StructTag(strukt.Tag(i))
+ if val, ok := tags.Lookup(b.Config.StructTag); ok && equalFieldName(val, name) {
+ if foundField != nil {
+ return nil, errors.Errorf("tag %s is ambigious; multiple fields have the same tag value of %s", b.Config.StructTag, val)
+ }
+
+ foundField = field
+ }
+ }
+ if foundField != nil {
+ return foundField, nil
+ }
+ }
+
+ // Then matching field names
+ for i := 0; i < strukt.NumFields(); i++ {
+ field := strukt.Field(i)
+ if !field.Exported() {
+ continue
+ }
+ if equalFieldName(field.Name(), name) { // match on the field name itself
+ return field, nil
+ }
+ }
+
+ // Then look in embedded structs
+ for i := 0; i < strukt.NumFields(); i++ {
+ field := strukt.Field(i)
+ if !field.Exported() {
+ continue
+ }
+
+ if !field.Anonymous() {
+ continue
+ }
+
+ fieldType := field.Type()
+ if ptr, ok := fieldType.(*types.Pointer); ok {
+ fieldType = ptr.Elem()
+ }
+
+ switch fieldType := fieldType.(type) {
+ case *types.Named:
+ f, err := b.findBindTarget(fieldType, name)
+ if err != nil {
+ return nil, err
+ }
+ if f != nil {
+ return f, nil
+ }
+ case *types.Struct:
+ f, err := b.findBindStructTarget(fieldType, name)
+ if err != nil {
+ return nil, err
+ }
+ if f != nil {
+ return f, nil
+ }
+ default:
+ panic(fmt.Errorf("unknown embedded field type %T", field.Type()))
+ }
+ }
+
+ return nil, nil
+}
+
+func (f *Field) HasDirectives() bool {
+ return len(f.Directives) > 0
+}
+
+func (f *Field) IsReserved() bool {
+ return strings.HasPrefix(f.Name, "__")
+}
+
+func (f *Field) IsMethod() bool {
+ return f.GoFieldType == GoFieldMethod
+}
+
+func (f *Field) IsVariable() bool {
+ return f.GoFieldType == GoFieldVariable
+}
+
+func (f *Field) IsMap() bool {
+ return f.GoFieldType == GoFieldMap
+}
+
+func (f *Field) IsConcurrent() bool {
+ if f.Object.DisableConcurrency {
+ return false
+ }
+ return f.MethodHasContext || f.IsResolver
+}
+
+func (f *Field) GoNameUnexported() string {
+ return templates.ToGoPrivate(f.Name)
+}
+
+func (f *Field) ShortInvocation() string {
+ return fmt.Sprintf("%s().%s(%s)", f.Object.Definition.Name, f.GoFieldName, f.CallArgs())
+}
+
+func (f *Field) ArgsFunc() string {
+ if len(f.Args) == 0 {
+ return ""
+ }
+
+ return "field_" + f.Object.Definition.Name + "_" + f.Name + "_args"
+}
+
+func (f *Field) ResolverType() string {
+ if !f.IsResolver {
+ return ""
+ }
+
+ return fmt.Sprintf("%s().%s(%s)", f.Object.Definition.Name, f.GoFieldName, f.CallArgs())
+}
+
+func (f *Field) ShortResolverDeclaration() string {
+ res := "(ctx context.Context"
+
+ if !f.Object.Root {
+ res += fmt.Sprintf(", obj *%s", templates.CurrentImports.LookupType(f.Object.Type))
+ }
+ for _, arg := range f.Args {
+ res += fmt.Sprintf(", %s %s", arg.VarName, templates.CurrentImports.LookupType(arg.TypeReference.GO))
+ }
+
+ result := templates.CurrentImports.LookupType(f.TypeReference.GO)
+ if f.Object.Stream {
+ result = "<-chan " + result
+ }
+
+ res += fmt.Sprintf(") (%s, error)", result)
+ return res
+}
+
+func (f *Field) ComplexitySignature() string {
+ res := fmt.Sprintf("func(childComplexity int")
+ for _, arg := range f.Args {
+ res += fmt.Sprintf(", %s %s", arg.VarName, templates.CurrentImports.LookupType(arg.TypeReference.GO))
+ }
+ res += ") int"
+ return res
+}
+
+func (f *Field) ComplexityArgs() string {
+ var args []string
+ for _, arg := range f.Args {
+ args = append(args, "args["+strconv.Quote(arg.Name)+"].("+templates.CurrentImports.LookupType(arg.TypeReference.GO)+")")
+ }
+
+ return strings.Join(args, ", ")
+}
+
+func (f *Field) CallArgs() string {
+ var args []string
+
+ if f.IsResolver {
+ args = append(args, "rctx")
+
+ if !f.Object.Root {
+ args = append(args, "obj")
+ }
+ } else {
+ if f.MethodHasContext {
+ args = append(args, "ctx")
+ }
+ }
+
+ for _, arg := range f.Args {
+ args = append(args, "args["+strconv.Quote(arg.Name)+"].("+templates.CurrentImports.LookupType(arg.TypeReference.GO)+")")
+ }
+
+ return strings.Join(args, ", ")
+}
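The binding priority implemented by findBindTarget/findBindStructTarget can be read off a small example model (a sketch; the Bug type and the struct tag name are assumptions, with Config.StructTag set to "gqlgen"):

    package models // hypothetical model package

    // Schema fields bind against this type as follows:
    type Bug struct {
        ID      string                 // "id"   -> 3rd priority: field with a matching name
        Details string `gqlgen:"body"` // "body" -> 2nd priority: field with a matching struct tag
        title   string                 // unexported fields (and methods) are never considered
    }

    // "status" -> 1st priority: an exported method with a matching name wins over any field.
    // Embedded structs are searched with the same rules if nothing above matches.
    func (b Bug) Status() string { return "open" }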
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/field.gotpl b/vendor/github.com/99designs/gqlgen/codegen/field.gotpl
index 3df847fa..9718a08a 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/field.gotpl
+++ b/vendor/github.com/99designs/gqlgen/codegen/field.gotpl
@@ -1,19 +1,19 @@
-{{ $field := . }}
-{{ $object := $field.Object }}
+{{- range $object := .Objects }}{{- range $field := $object.Fields }}
{{- if $object.Stream }}
- func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {
+ func (ec *executionContext) _{{$object.Name}}_{{$field.Name}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {
+ ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ Field: field,
+ Args: nil,
+ })
{{- if $field.Args }}
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := {{ $field.ArgsFunc }}(rawArgs)
+ args, err := ec.{{ $field.ArgsFunc }}(ctx,rawArgs)
if err != nil {
ec.Error(ctx, err)
return nil
}
{{- end }}
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Field: field,
- })
// FIXME: subscriptions are missing request middleware stack https://github.com/99designs/gqlgen/issues/259
// and Tracer stack
rctx := ctx
@@ -27,35 +27,51 @@
if !ok {
return nil
}
- var out graphql.OrderedMap
- out.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())
- return &out
+ return graphql.WriterFunc(func(w io.Writer) {
+ w.Write([]byte{'{'})
+ graphql.MarshalString(field.Alias).MarshalGQL(w)
+ w.Write([]byte{':'})
+ ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res).MarshalGQL(w)
+ w.Write([]byte{'}'})
+ })
}
}
{{ else }}
- // nolint: vetshadow
- func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {
+ func (ec *executionContext) _{{$object.Name}}_{{$field.Name}}(ctx context.Context, field graphql.CollectedField{{ if not $object.Root }}, obj {{$object.Reference | ref}}{{end}}) graphql.Marshaler {
ctx = ec.Tracer.StartFieldExecution(ctx, field)
defer func () { ec.Tracer.EndFieldExecution(ctx) }()
+ rctx := &graphql.ResolverContext{
+ Object: {{$object.Name|quote}},
+ Field: field,
+ Args: nil,
+ IsMethod: {{or $field.IsMethod $field.IsResolver}},
+ }
+ ctx = graphql.WithResolverContext(ctx, rctx)
{{- if $field.Args }}
rawArgs := field.ArgumentMap(ec.Variables)
- args, err := {{ $field.ArgsFunc }}(rawArgs)
+ args, err := ec.{{ $field.ArgsFunc }}(ctx,rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
+ rctx.Args = args
{{- end }}
- rctx := &graphql.ResolverContext{
- Object: {{$object.GQLType|quote}},
- Args: {{if $field.Args }}args{{else}}nil{{end}},
- Field: field,
- }
- ctx = graphql.WithResolverContext(ctx, rctx)
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, func(rctx context.Context) (interface{}, error) {
ctx = rctx // use context from middleware stack in children
{{- if $field.IsResolver }}
return ec.resolvers.{{ $field.ShortInvocation }}
+ {{- else if $field.IsMap }}
+ switch v := {{$field.GoReceiverName}}[{{$field.Name|quote}}].(type) {
+ case {{$field.TypeReference.GO | ref}}:
+ return v, nil
+ case {{$field.TypeReference.Elem.GO | ref}}:
+ return &v, nil
+ case nil:
+ return ({{$field.TypeReference.GO | ref}})(nil), nil
+ default:
+ return nil, fmt.Errorf("unexpected type %T for field %s", v, {{ $field.Name | quote}})
+ }
{{- else if $field.IsMethod }}
{{- if $field.NoErr }}
return {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }}), nil
@@ -67,16 +83,18 @@
{{- end }}
})
if resTmp == nil {
- {{- if $field.ASTType.NonNull }}
+ {{- if $field.TypeReference.GQL.NonNull }}
if !ec.HasError(rctx) {
ec.Errorf(ctx, "must not be null")
}
{{- end }}
return graphql.Null
}
- res := resTmp.({{$field.Signature}})
+ res := resTmp.({{$field.TypeReference.GO | ref}})
rctx.Result = res
ctx = ec.Tracer.StartFieldChildExecution(ctx)
- {{ $field.WriteJson }}
+ return ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res)
}
{{ end }}
+
+{{- end }}{{- end}}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/generate.go b/vendor/github.com/99designs/gqlgen/codegen/generate.go
new file mode 100644
index 00000000..eafa3f87
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/generate.go
@@ -0,0 +1,15 @@
+package codegen
+
+import (
+ "github.com/99designs/gqlgen/codegen/templates"
+)
+
+func GenerateCode(data *Data) error {
+ return templates.Render(templates.Options{
+ PackageName: data.Config.Exec.Package,
+ Filename: data.Config.Exec.Filename,
+ Data: data,
+ RegionTags: true,
+ GeneratedHeader: true,
+ })
+}
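BuildData and GenerateCode form the core pipeline of this package; a minimal driver might look like the sketch below (assuming the caller obtains a *config.Config elsewhere, e.g. from gqlgen.yml):

    package driver // hypothetical

    import (
        "github.com/99designs/gqlgen/codegen"
        "github.com/99designs/gqlgen/codegen/config"
    )

    // generate runs the two stages defined in this package: build the unified
    // Data model from the config and schema, then render it into cfg.Exec.Filename.
    func generate(cfg *config.Config) error {
        data, err := codegen.BuildData(cfg)
        if err != nil {
            return err
        }
        return codegen.GenerateCode(data)
    }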
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/generated.gotpl b/vendor/github.com/99designs/gqlgen/codegen/generated!.gotpl
index a37a1613..dce8ce97 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/generated.gotpl
+++ b/vendor/github.com/99designs/gqlgen/codegen/generated!.gotpl
@@ -1,24 +1,17 @@
-// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
+{{ reserveImport "context" }}
+{{ reserveImport "fmt" }}
+{{ reserveImport "io" }}
+{{ reserveImport "strconv" }}
+{{ reserveImport "time" }}
+{{ reserveImport "sync" }}
+{{ reserveImport "errors" }}
+{{ reserveImport "bytes" }}
-package {{ .PackageName }}
+{{ reserveImport "github.com/vektah/gqlparser" }}
+{{ reserveImport "github.com/vektah/gqlparser/ast" }}
+{{ reserveImport "github.com/99designs/gqlgen/graphql" }}
+{{ reserveImport "github.com/99designs/gqlgen/graphql/introspection" }}
-import (
- %%%IMPORTS%%%
-
- {{ reserveImport "context" }}
- {{ reserveImport "fmt" }}
- {{ reserveImport "io" }}
- {{ reserveImport "strconv" }}
- {{ reserveImport "time" }}
- {{ reserveImport "sync" }}
- {{ reserveImport "errors" }}
- {{ reserveImport "bytes" }}
-
- {{ reserveImport "github.com/vektah/gqlparser" }}
- {{ reserveImport "github.com/vektah/gqlparser/ast" }}
- {{ reserveImport "github.com/99designs/gqlgen/graphql" }}
- {{ reserveImport "github.com/99designs/gqlgen/graphql/introspection" }}
-)
// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
@@ -38,7 +31,7 @@ type Config struct {
type ResolverRoot interface {
{{- range $object := .Objects -}}
{{ if $object.HasResolvers -}}
- {{$object.GQLType}}() {{$object.GQLType}}Resolver
+ {{$object.Name}}() {{$object.Name}}Resolver
{{ end }}
{{- end }}
}
@@ -52,10 +45,10 @@ type DirectiveRoot struct {
type ComplexityRoot struct {
{{ range $object := .Objects }}
{{ if not $object.IsReserved -}}
- {{ $object.GQLType|toCamel }} struct {
- {{ range $field := $object.Fields -}}
+ {{ $object.Name|go }} struct {
+ {{ range $field := $object.UniqueFields -}}
{{ if not $field.IsReserved -}}
- {{ $field.GQLName|toCamel }} {{ $field.ComplexitySignature }}
+ {{ $field.GoFieldName }} {{ $field.ComplexitySignature }}
{{ end }}
{{- end }}
}
@@ -65,32 +58,16 @@ type ComplexityRoot struct {
{{ range $object := .Objects -}}
{{ if $object.HasResolvers }}
- type {{$object.GQLType}}Resolver interface {
+ type {{$object.Name}}Resolver interface {
{{ range $field := $object.Fields -}}
- {{ $field.ShortResolverDeclaration }}
+ {{- if $field.IsResolver }}
+ {{- $field.GoFieldName}}{{ $field.ShortResolverDeclaration }}
+ {{- end }}
{{ end }}
}
{{- end }}
{{- end }}
-{{ range $object := .Objects -}}
- {{ range $field := $object.Fields -}}
- {{ if $field.Args }}
- func {{ $field.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- {{ template "args.gotpl" $field.Args }}
- }
- {{ end }}
- {{ end }}
-{{- end }}
-
-{{ range $directive := .Directives }}
- {{ if $directive.Args }}
- func {{ $directive.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {
- {{ template "args.gotpl" $directive.Args }}
- }
- {{ end }}
-{{ end }}
-
type executableSchema struct {
resolvers ResolverRoot
directives DirectiveRoot
@@ -102,22 +79,24 @@ func (e *executableSchema) Schema() *ast.Schema {
}
func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
+ ec := executionContext{nil, e}
+ _ = ec
switch typeName + "." + field {
{{ range $object := .Objects }}
{{ if not $object.IsReserved }}
- {{ range $field := $object.Fields }}
+ {{ range $field := $object.UniqueFields }}
{{ if not $field.IsReserved }}
- case "{{$object.GQLType}}.{{$field.GQLName}}":
- if e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}} == nil {
+ case "{{$object.Name}}.{{$field.GoFieldName}}":
+ if e.complexity.{{$object.Name|go}}.{{$field.GoFieldName}} == nil {
break
}
{{ if $field.Args }}
- args, err := {{ $field.ArgsFunc }}(rawArgs)
+ args, err := ec.{{ $field.ArgsFunc }}(context.TODO(),rawArgs)
if err != nil {
return 0, false
}
{{ end }}
- return e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{end}}), true
+ return e.complexity.{{$object.Name|go}}.{{$field.GoFieldName}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{end}}), true
{{ end }}
{{ end }}
{{ end }}
@@ -131,7 +110,7 @@ func (e *executableSchema) Query(ctx context.Context, op *ast.OperationDefinitio
ec := executionContext{graphql.GetRequestContext(ctx), e}
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- data := ec._{{.QueryRoot.GQLType}}(ctx, op.SelectionSet)
+ data := ec._{{.QueryRoot.Name}}(ctx, op.SelectionSet)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return buf.Bytes()
@@ -140,7 +119,8 @@ func (e *executableSchema) Query(ctx context.Context, op *ast.OperationDefinitio
return &graphql.Response{
Data: buf,
Errors: ec.Errors,
- Extensions: ec.Extensions, }
+ Extensions: ec.Extensions,
+ }
{{- else }}
return graphql.ErrorResponse(ctx, "queries are not supported")
{{- end }}
@@ -151,7 +131,7 @@ func (e *executableSchema) Mutation(ctx context.Context, op *ast.OperationDefini
ec := executionContext{graphql.GetRequestContext(ctx), e}
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
- data := ec._{{.MutationRoot.GQLType}}(ctx, op.SelectionSet)
+ data := ec._{{.MutationRoot.Name}}(ctx, op.SelectionSet)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return buf.Bytes()
@@ -171,7 +151,7 @@ func (e *executableSchema) Subscription(ctx context.Context, op *ast.OperationDe
{{- if .SubscriptionRoot }}
ec := executionContext{graphql.GetRequestContext(ctx), e}
- next := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.SelectionSet)
+ next := ec._{{.SubscriptionRoot.Name}}(ctx, op.SelectionSet)
if ec.Errors != nil {
return graphql.OneShot(&graphql.Response{Data: []byte("null"), Errors: ec.Errors})
}
@@ -209,22 +189,6 @@ type executionContext struct {
*executableSchema
}
-{{- range $object := .Objects }}
- {{ template "object.gotpl" $object }}
-
- {{- range $field := $object.Fields }}
- {{ template "field.gotpl" $field }}
- {{ end }}
-{{- end}}
-
-{{- range $interface := .Interfaces }}
- {{ template "interface.gotpl" $interface }}
-{{- end }}
-
-{{- range $input := .Inputs }}
- {{ template "input.gotpl" $input }}
-{{- end }}
-
func (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}, next graphql.Resolver) (ret interface{}) {
defer func() {
if r := recover(); r != nil {
@@ -241,7 +205,7 @@ func (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}
if ec.directives.{{$directive.Name|ucFirst}} != nil {
{{- if $directive.Args }}
rawArgs := d.ArgumentMap(ec.Variables)
- args, err := {{ $directive.ArgsFunc }}(rawArgs)
+ args, err := ec.{{ $directive.ArgsFunc }}(ctx,rawArgs)
if err != nil {
ec.Error(ctx, err)
return nil
@@ -279,7 +243,7 @@ func (ec *executionContext) introspectType(name string) (*introspection.Type, er
}
var parsedSchema = gqlparser.MustLoadSchema(
- {{- range $filename, $schema := .SchemaRaw }}
+ {{- range $filename, $schema := .SchemaStr }}
&ast.Source{Name: {{$filename|quote}}, Input: {{$schema|rawQuote}}},
{{- end }}
)
diff --git a/vendor/github.com/99designs/gqlgen/codegen/input.gotpl b/vendor/github.com/99designs/gqlgen/codegen/input.gotpl
new file mode 100644
index 00000000..c8ac7ad3
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/input.gotpl
@@ -0,0 +1,56 @@
+{{- range $input := .Inputs }}
+ {{- if not .HasUnmarshal }}
+ func (ec *executionContext) unmarshalInput{{ .Name }}(ctx context.Context, v interface{}) ({{.Type | ref}}, error) {
+ var it {{.Type | ref}}
+ var asMap = v.(map[string]interface{})
+ {{ range $field := .Fields}}
+ {{- if $field.Default}}
+ if _, present := asMap[{{$field.Name|quote}}] ; !present {
+ asMap[{{$field.Name|quote}}] = {{ $field.Default | dump }}
+ }
+ {{- end}}
+ {{- end }}
+
+ for k, v := range asMap {
+ switch k {
+ {{- range $field := .Fields }}
+ case {{$field.Name|quote}}:
+ var err error
+ {{- if $field.Directives }}
+ getField0 := func(ctx context.Context) (interface{}, error) { return ec.{{ $field.TypeReference.UnmarshalFunc }}(ctx, v) }
+
+ {{- range $i, $directive := $field.Directives }}
+ getField{{add $i 1}} := func(ctx context.Context) (res interface{}, err error) {
+ {{- range $dArg := $directive.Args }}
+ {{- if and $dArg.TypeReference.IsPtr ( notNil "Value" $dArg ) }}
+ {{ $dArg.VarName }} := {{ $dArg.Value | dump }}
+ {{- end }}
+ {{- end }}
+ n := getField{{$i}}
+ return ec.directives.{{$directive.Name|ucFirst}}({{$directive.ResolveArgs "it" "n" }})
+ }
+ {{- end }}
+
+ tmp, err := getField{{$field.Directives|len}}(ctx)
+ if err != nil {
+ return it, err
+ }
+ if data, ok := tmp.({{ $field.TypeReference.GO | ref }}) ; ok {
+ it.{{$field.GoFieldName}} = data
+ } else {
+ return it, fmt.Errorf(`unexpected type %T from directive, should be {{ $field.TypeReference.GO }}`, tmp)
+ }
+ {{- else }}
+ it.{{$field.GoFieldName}}, err = ec.{{ $field.TypeReference.UnmarshalFunc }}(ctx, v)
+ if err != nil {
+ return it, err
+ }
+ {{- end }}
+ {{- end }}
+ }
+ }
+
+ return it, nil
+ }
+ {{- end }}
+{{ end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/input_build.go b/vendor/github.com/99designs/gqlgen/codegen/input_build.go
deleted file mode 100644
index 70fa564d..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/input_build.go
+++ /dev/null
@@ -1,96 +0,0 @@
-package codegen
-
-import (
- "go/types"
- "sort"
-
- "github.com/pkg/errors"
- "github.com/vektah/gqlparser/ast"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildInputs(namedTypes NamedTypes, prog *loader.Program) (Objects, error) {
- var inputs Objects
-
- for _, typ := range cfg.schema.Types {
- switch typ.Kind {
- case ast.InputObject:
- input, err := cfg.buildInput(namedTypes, typ)
- if err != nil {
- return nil, err
- }
-
- def, err := findGoType(prog, input.Package, input.GoType)
- if err != nil {
- return nil, errors.Wrap(err, "cannot find type")
- }
- if def != nil {
- input.Marshaler = buildInputMarshaler(typ, def)
- bindErrs := bindObject(def.Type(), input, cfg.StructTag)
- if len(bindErrs) > 0 {
- return nil, bindErrs
- }
- }
-
- inputs = append(inputs, input)
- }
- }
-
- sort.Slice(inputs, func(i, j int) bool {
- return inputs[i].GQLType < inputs[j].GQLType
- })
-
- return inputs, nil
-}
-
-func (cfg *Config) buildInput(types NamedTypes, typ *ast.Definition) (*Object, error) {
- obj := &Object{NamedType: types[typ.Name]}
- typeEntry, entryExists := cfg.Models[typ.Name]
-
- for _, field := range typ.Fields {
- newField := Field{
- GQLName: field.Name,
- Type: types.getType(field.Type),
- Object: obj,
- }
-
- if entryExists {
- if typeField, ok := typeEntry.Fields[field.Name]; ok {
- newField.GoFieldName = typeField.FieldName
- }
- }
-
- if field.DefaultValue != nil {
- var err error
- newField.Default, err = field.DefaultValue.Value(nil)
- if err != nil {
- return nil, errors.Errorf("default value for %s.%s is not valid: %s", typ.Name, field.Name, err.Error())
- }
- }
-
- if !newField.Type.IsInput && !newField.Type.IsScalar {
- return nil, errors.Errorf("%s cannot be used as a field of %s. only input and scalar types are allowed", newField.GQLType, obj.GQLType)
- }
-
- obj.Fields = append(obj.Fields, newField)
-
- }
- return obj, nil
-}
-
-// if user has implemented an UnmarshalGQL method on the input type manually, use it
-// otherwise we will generate one.
-func buildInputMarshaler(typ *ast.Definition, def types.Object) *Ref {
- switch def := def.(type) {
- case *types.TypeName:
- namedType := def.Type().(*types.Named)
- for i := 0; i < namedType.NumMethods(); i++ {
- method := namedType.Method(i)
- if method.Name() == "UnmarshalGQL" {
- return nil
- }
- }
- }
-
- return &Ref{GoType: typ.Name}
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/interface.go b/vendor/github.com/99designs/gqlgen/codegen/interface.go
index 2de0c88a..f59e8ed0 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/interface.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/interface.go
@@ -1,13 +1,63 @@
package codegen
-type Interface struct {
- *NamedType
+import (
+ "go/types"
+
+ "github.com/vektah/gqlparser/ast"
+)
+type Interface struct {
+ *ast.Definition
+ Type types.Type
Implementors []InterfaceImplementor
+ InTypemap bool
}
type InterfaceImplementor struct {
- ValueReceiver bool
+ *ast.Definition
+
+ Interface *Interface
+ Type types.Type
+}
+
+func (b *builder) buildInterface(typ *ast.Definition) *Interface {
+ obj, err := b.Binder.DefaultUserObject(typ.Name)
+ if err != nil {
+ panic(err)
+ }
+
+ i := &Interface{
+ Definition: typ,
+ Type: obj,
+ InTypemap: b.Config.Models.UserDefined(typ.Name),
+ }
+
+ for _, implementor := range b.Schema.GetPossibleTypes(typ) {
+ obj, err := b.Binder.DefaultUserObject(implementor.Name)
+ if err != nil {
+ panic(err)
+ }
+
+ i.Implementors = append(i.Implementors, InterfaceImplementor{
+ Definition: implementor,
+ Type: obj,
+ Interface: i,
+ })
+ }
+
+ return i
+}
+
+func (i *InterfaceImplementor) ValueReceiver() bool {
+ interfaceType, err := findGoInterface(i.Interface.Type)
+ if interfaceType == nil || err != nil {
+ return true
+ }
+
+ implementorType, err := findGoNamedType(i.Type)
+ if implementorType == nil || err != nil {
+ return true
+ }
- *NamedType
+ return types.Implements(implementorType, interfaceType)
}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/interface.gotpl b/vendor/github.com/99designs/gqlgen/codegen/interface.gotpl
new file mode 100644
index 00000000..81a58076
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/interface.gotpl
@@ -0,0 +1,20 @@
+{{- range $interface := .Interfaces }}
+
+func (ec *executionContext) _{{$interface.Name}}(ctx context.Context, sel ast.SelectionSet, obj *{{$interface.Type | ref}}) graphql.Marshaler {
+ switch obj := (*obj).(type) {
+ case nil:
+ return graphql.Null
+ {{- range $implementor := $interface.Implementors }}
+ {{- if $implementor.ValueReceiver }}
+ case {{$implementor.Type | ref}}:
+ return ec._{{$implementor.Name}}(ctx, sel, &obj)
+ {{- end}}
+ case *{{$implementor.Type | ref}}:
+ return ec._{{$implementor.Name}}(ctx, sel, obj)
+ {{- end }}
+ default:
+ panic(fmt.Errorf("unexpected type %T", obj))
+ }
+}
+
+{{- end }}
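Rendered against a hypothetical interface Authored with a single implementor Bug (names assumed; Bug satisfies the Go interface with a value receiver, so ValueReceiver reports true), the template above expands to roughly:

    func (ec *executionContext) _Authored(ctx context.Context, sel ast.SelectionSet, obj *models.Authored) graphql.Marshaler {
        switch obj := (*obj).(type) {
        case nil:
            return graphql.Null
        case models.Bug: // value case, emitted only because ValueReceiver returned true
            return ec._Bug(ctx, sel, &obj)
        case *models.Bug:
            return ec._Bug(ctx, sel, obj)
        default:
            panic(fmt.Errorf("unexpected type %T", obj))
        }
    }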
diff --git a/vendor/github.com/99designs/gqlgen/codegen/interface_build.go b/vendor/github.com/99designs/gqlgen/codegen/interface_build.go
deleted file mode 100644
index 92052ba6..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/interface_build.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package codegen
-
-import (
- "go/types"
- "sort"
-
- "github.com/vektah/gqlparser/ast"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildInterfaces(types NamedTypes, prog *loader.Program) []*Interface {
- var interfaces []*Interface
- for _, typ := range cfg.schema.Types {
- if typ.Kind == ast.Union || typ.Kind == ast.Interface {
- interfaces = append(interfaces, cfg.buildInterface(types, typ, prog))
- }
- }
-
- sort.Slice(interfaces, func(i, j int) bool {
- return interfaces[i].GQLType < interfaces[j].GQLType
- })
-
- return interfaces
-}
-
-func (cfg *Config) buildInterface(types NamedTypes, typ *ast.Definition, prog *loader.Program) *Interface {
- i := &Interface{NamedType: types[typ.Name]}
-
- for _, implementor := range cfg.schema.GetPossibleTypes(typ) {
- t := types[implementor.Name]
-
- i.Implementors = append(i.Implementors, InterfaceImplementor{
- NamedType: t,
- ValueReceiver: cfg.isValueReceiver(types[typ.Name], t, prog),
- })
- }
-
- return i
-}
-
-func (cfg *Config) isValueReceiver(intf *NamedType, implementor *NamedType, prog *loader.Program) bool {
- interfaceType, err := findGoInterface(prog, intf.Package, intf.GoType)
- if interfaceType == nil || err != nil {
- return true
- }
-
- implementorType, err := findGoNamedType(prog, implementor.Package, implementor.GoType)
- if implementorType == nil || err != nil {
- return true
- }
-
- return types.Implements(implementorType, interfaceType)
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/model.go b/vendor/github.com/99designs/gqlgen/codegen/model.go
deleted file mode 100644
index bcdc8703..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/model.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package codegen
-
-type Model struct {
- *NamedType
- Description string
- Fields []ModelField
- Implements []*NamedType
-}
-
-type ModelField struct {
- *Type
- GQLName string
- GoFieldName string
- GoFKName string
- GoFKType string
- Description string
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/models_build.go b/vendor/github.com/99designs/gqlgen/codegen/models_build.go
deleted file mode 100644
index 56d2ff1f..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/models_build.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package codegen
-
-import (
- "sort"
-
- "github.com/vektah/gqlparser/ast"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model, error) {
- var models []Model
-
- for _, typ := range cfg.schema.Types {
- var model Model
- switch typ.Kind {
- case ast.Object:
- obj, err := cfg.buildObject(types, typ)
- if err != nil {
- return nil, err
- }
- if obj.Root || obj.IsUserDefined {
- continue
- }
- model = cfg.obj2Model(obj)
- case ast.InputObject:
- obj, err := cfg.buildInput(types, typ)
- if err != nil {
- return nil, err
- }
- if obj.IsUserDefined {
- continue
- }
- model = cfg.obj2Model(obj)
- case ast.Interface, ast.Union:
- intf := cfg.buildInterface(types, typ, prog)
- if intf.IsUserDefined {
- continue
- }
- model = int2Model(intf)
- default:
- continue
- }
- model.Description = typ.Description // It's this or change both obj2Model and buildObject
-
- models = append(models, model)
- }
-
- sort.Slice(models, func(i, j int) bool {
- return models[i].GQLType < models[j].GQLType
- })
-
- return models, nil
-}
-
-func (cfg *Config) obj2Model(obj *Object) Model {
- model := Model{
- NamedType: obj.NamedType,
- Implements: obj.Implements,
- Fields: []ModelField{},
- }
-
- model.GoType = ucFirst(obj.GQLType)
- model.Marshaler = &Ref{GoType: obj.GoType}
-
- for i := range obj.Fields {
- field := &obj.Fields[i]
- mf := ModelField{Type: field.Type, GQLName: field.GQLName}
-
- if field.GoFieldName != "" {
- mf.GoFieldName = field.GoFieldName
- } else {
- mf.GoFieldName = field.GoNameExported()
- }
-
- model.Fields = append(model.Fields, mf)
- }
-
- return model
-}
-
-func int2Model(obj *Interface) Model {
- model := Model{
- NamedType: obj.NamedType,
- Fields: []ModelField{},
- }
-
- model.GoType = ucFirst(obj.GQLType)
- model.Marshaler = &Ref{GoType: obj.GoType}
-
- return model
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/object.go b/vendor/github.com/99designs/gqlgen/codegen/object.go
index 656af297..539c3164 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/object.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/object.go
@@ -1,13 +1,13 @@
package codegen
import (
- "bytes"
- "fmt"
+ "go/types"
"strconv"
"strings"
- "text/template"
"unicode"
+ "github.com/99designs/gqlgen/codegen/config"
+ "github.com/pkg/errors"
"github.com/vektah/gqlparser/ast"
)
@@ -17,337 +17,150 @@ const (
GoFieldUndefined GoFieldType = iota
GoFieldMethod
GoFieldVariable
+ GoFieldMap
)
type Object struct {
- *NamedType
+ *ast.Definition
- Fields []Field
- Satisfies []string
- Implements []*NamedType
- ResolverInterface *Ref
+ Type types.Type
+ ResolverInterface types.Type
Root bool
+ Fields []*Field
+ Implements []*ast.Definition
DisableConcurrency bool
Stream bool
+ Directives []*Directive
}
-type Field struct {
- *Type
- Description string // Description of a field
- GQLName string // The name of the field in graphql
- GoFieldType GoFieldType // The field type in go, if any
- GoReceiverName string // The name of method & var receiver in go, if any
- GoFieldName string // The name of the method or var in go, if any
- Args []FieldArgument // A list of arguments to be passed to this field
- ForceResolver bool // Should be emit Resolver method
- MethodHasContext bool // If this is bound to a go method, does the method also take a context
- NoErr bool // If this is bound to a go method, does that method have an error as the second argument
- Object *Object // A link back to the parent object
- Default interface{} // The default value
-}
-
-type FieldArgument struct {
- *Type
-
- GQLName string // The name of the argument in graphql
- GoVarName string // The name of the var in go
- Object *Object // A link back to the parent object
- Default interface{} // The default value
-}
-
-type Objects []*Object
-
-func (o *Object) Implementors() string {
- satisfiedBy := strconv.Quote(o.GQLType)
- for _, s := range o.Satisfies {
- satisfiedBy += ", " + strconv.Quote(s)
- }
- return "[]string{" + satisfiedBy + "}"
-}
-
-func (o *Object) HasResolvers() bool {
- for _, f := range o.Fields {
- if f.IsResolver() {
- return true
+func (b *builder) buildObject(typ *ast.Definition) (*Object, error) {
+ dirs, err := b.getDirectives(typ.Directives)
+ if err != nil {
+ return nil, errors.Wrap(err, typ.Name)
+ }
+
+ obj := &Object{
+ Definition: typ,
+ Root: b.Schema.Query == typ || b.Schema.Mutation == typ || b.Schema.Subscription == typ,
+ DisableConcurrency: typ == b.Schema.Mutation,
+ Stream: typ == b.Schema.Subscription,
+ Directives: dirs,
+ ResolverInterface: types.NewNamed(
+ types.NewTypeName(0, b.Config.Exec.Pkg(), typ.Name+"Resolver", nil),
+ nil,
+ nil,
+ ),
+ }
+
+ if !obj.Root {
+ goObject, err := b.Binder.DefaultUserObject(typ.Name)
+ if err != nil {
+ return nil, err
}
+ obj.Type = goObject
}
- return false
-}
-func (o *Object) IsConcurrent() bool {
- for _, f := range o.Fields {
- if f.IsConcurrent() {
- return true
- }
+ for _, intf := range b.Schema.GetImplements(typ) {
+ obj.Implements = append(obj.Implements, b.Schema.Types[intf.Name])
}
- return false
-}
-
-func (o *Object) IsReserved() bool {
- return strings.HasPrefix(o.GQLType, "__")
-}
-func (f *Field) IsResolver() bool {
- return f.GoFieldName == ""
-}
-
-func (f *Field) IsReserved() bool {
- return strings.HasPrefix(f.GQLName, "__")
-}
-
-func (f *Field) IsMethod() bool {
- return f.GoFieldType == GoFieldMethod
-}
+ for _, field := range typ.Fields {
+ if strings.HasPrefix(field.Name, "__") {
+ continue
+ }
-func (f *Field) IsVariable() bool {
- return f.GoFieldType == GoFieldVariable
-}
+ var f *Field
+ f, err = b.buildField(obj, field)
+ if err != nil {
+ return nil, err
+ }
-func (f *Field) IsConcurrent() bool {
- if f.Object.DisableConcurrency {
- return false
+ obj.Fields = append(obj.Fields, f)
}
- return f.MethodHasContext || f.IsResolver()
-}
-func (f *Field) GoNameExported() string {
- return lintName(ucFirst(f.GQLName))
+ return obj, nil
}
-func (f *Field) GoNameUnexported() string {
- return lintName(f.GQLName)
-}
-
-func (f *Field) ShortInvocation() string {
- if !f.IsResolver() {
- return ""
+func (o *Object) Reference() types.Type {
+ switch o.Type.(type) {
+ case *types.Pointer, *types.Slice, *types.Map:
+ return o.Type
}
- return fmt.Sprintf("%s().%s(%s)", f.Object.GQLType, f.GoNameExported(), f.CallArgs())
+ return types.NewPointer(o.Type)
}
-func (f *Field) ArgsFunc() string {
- if len(f.Args) == 0 {
- return ""
- }
-
- return "field_" + f.Object.GQLType + "_" + f.GQLName + "_args"
-}
+type Objects []*Object
-func (f *Field) ResolverType() string {
- if !f.IsResolver() {
- return ""
+func (o *Object) Implementors() string {
+ satisfiedBy := strconv.Quote(o.Name)
+ for _, s := range o.Implements {
+ satisfiedBy += ", " + strconv.Quote(s.Name)
}
-
- return fmt.Sprintf("%s().%s(%s)", f.Object.GQLType, f.GoNameExported(), f.CallArgs())
+ return "[]string{" + satisfiedBy + "}"
}
-func (f *Field) ShortResolverDeclaration() string {
- if !f.IsResolver() {
- return ""
- }
- res := fmt.Sprintf("%s(ctx context.Context", f.GoNameExported())
-
- if !f.Object.Root {
- res += fmt.Sprintf(", obj *%s", f.Object.FullName())
- }
- for _, arg := range f.Args {
- res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
- }
-
- result := f.Signature()
- if f.Object.Stream {
- result = "<-chan " + result
+func (o *Object) HasResolvers() bool {
+ for _, f := range o.Fields {
+ if f.IsResolver {
+ return true
+ }
}
-
- res += fmt.Sprintf(") (%s, error)", result)
- return res
+ return false
}
-func (f *Field) ResolverDeclaration() string {
- if !f.IsResolver() {
- return ""
- }
- res := fmt.Sprintf("%s_%s(ctx context.Context", f.Object.GQLType, f.GoNameUnexported())
-
- if !f.Object.Root {
- res += fmt.Sprintf(", obj *%s", f.Object.FullName())
+func (o *Object) HasUnmarshal() bool {
+ if o.Type == config.MapType {
+ return true
}
- for _, arg := range f.Args {
- res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
- }
-
- result := f.Signature()
- if f.Object.Stream {
- result = "<-chan " + result
+ for i := 0; i < o.Type.(*types.Named).NumMethods(); i++ {
+ switch o.Type.(*types.Named).Method(i).Name() {
+ case "UnmarshalGQL":
+ return true
+ }
}
-
- res += fmt.Sprintf(") (%s, error)", result)
- return res
+ return false
}
-func (f *Field) ComplexitySignature() string {
- res := fmt.Sprintf("func(childComplexity int")
- for _, arg := range f.Args {
- res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
+func (o *Object) HasDirectives() bool {
+ if len(o.Directives) > 0 {
+ return true
}
- res += ") int"
- return res
-}
-
-func (f *Field) ComplexityArgs() string {
- var args []string
- for _, arg := range f.Args {
- args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
+ for _, f := range o.Fields {
+ if f.HasDirectives() {
+ return true
+ }
}
- return strings.Join(args, ", ")
+ return false
}
-func (f *Field) CallArgs() string {
- var args []string
-
- if f.IsResolver() {
- args = append(args, "rctx")
-
- if !f.Object.Root {
- args = append(args, "obj")
- }
- } else {
- if f.MethodHasContext {
- args = append(args, "ctx")
+func (o *Object) IsConcurrent() bool {
+ for _, f := range o.Fields {
+ if f.IsConcurrent() {
+ return true
}
}
-
- for _, arg := range f.Args {
- args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
- }
-
- return strings.Join(args, ", ")
-}
-
-// should be in the template, but its recursive and has a bunch of args
-func (f *Field) WriteJson() string {
- return f.doWriteJson("res", f.Type.Modifiers, f.ASTType, false, 1)
+ return false
}
-func (f *Field) doWriteJson(val string, remainingMods []string, astType *ast.Type, isPtr bool, depth int) string {
- switch {
- case len(remainingMods) > 0 && remainingMods[0] == modPtr:
- return tpl(`
- if {{.val}} == nil {
- {{- if .nonNull }}
- if !ec.HasError(rctx) {
- ec.Errorf(ctx, "must not be null")
- }
- {{- end }}
- return graphql.Null
- }
- {{.next }}`, map[string]interface{}{
- "val": val,
- "nonNull": astType.NonNull,
- "next": f.doWriteJson(val, remainingMods[1:], astType, true, depth+1),
- })
-
- case len(remainingMods) > 0 && remainingMods[0] == modList:
- if isPtr {
- val = "*" + val
- }
- var arr = "arr" + strconv.Itoa(depth)
- var index = "idx" + strconv.Itoa(depth)
- var usePtr bool
- if len(remainingMods) == 1 && !isPtr {
- usePtr = true
- }
-
- return tpl(`
- {{.arr}} := make(graphql.Array, len({{.val}}))
- {{ if and .top (not .isScalar) }} var wg sync.WaitGroup {{ end }}
- {{ if not .isScalar }}
- isLen1 := len({{.val}}) == 1
- if !isLen1 {
- wg.Add(len({{.val}}))
- }
- {{ end }}
- for {{.index}} := range {{.val}} {
- {{- if not .isScalar }}
- {{.index}} := {{.index}}
- rctx := &graphql.ResolverContext{
- Index: &{{.index}},
- Result: {{ if .usePtr }}&{{end}}{{.val}}[{{.index}}],
- }
- ctx := graphql.WithResolverContext(ctx, rctx)
- f := func({{.index}} int) {
- if !isLen1 {
- defer wg.Done()
- }
- {{.arr}}[{{.index}}] = func() graphql.Marshaler {
- {{ .next }}
- }()
- }
- if isLen1 {
- f({{.index}})
- } else {
- go f({{.index}})
- }
- {{ else }}
- {{.arr}}[{{.index}}] = func() graphql.Marshaler {
- {{ .next }}
- }()
- {{- end}}
- }
- {{ if and .top (not .isScalar) }} wg.Wait() {{ end }}
- return {{.arr}}`, map[string]interface{}{
- "val": val,
- "arr": arr,
- "index": index,
- "top": depth == 1,
- "arrayLen": len(val),
- "isScalar": f.IsScalar,
- "usePtr": usePtr,
- "next": f.doWriteJson(val+"["+index+"]", remainingMods[1:], astType.Elem, false, depth+1),
- })
-
- case f.IsScalar:
- if isPtr {
- val = "*" + val
- }
- return f.Marshal(val)
-
- default:
- if !isPtr {
- val = "&" + val
- }
- return tpl(`
- return ec._{{.type}}(ctx, field.Selections, {{.val}})`, map[string]interface{}{
- "type": f.GQLType,
- "val": val,
- })
- }
+func (o *Object) IsReserved() bool {
+ return strings.HasPrefix(o.Definition.Name, "__")
}
-func (f *FieldArgument) Stream() bool {
- return f.Object != nil && f.Object.Stream
+func (o *Object) Description() string {
+ return o.Definition.Description
}
func (os Objects) ByName(name string) *Object {
for i, o := range os {
- if strings.EqualFold(o.GQLType, name) {
+ if strings.EqualFold(o.Definition.Name, name) {
return os[i]
}
}
return nil
}
-func tpl(tpl string, vars map[string]interface{}) string {
- b := &bytes.Buffer{}
- err := template.Must(template.New("inline").Parse(tpl)).Execute(b, vars)
- if err != nil {
- panic(err)
- }
- return b.String()
-}
-
func ucFirst(s string) string {
if s == "" {
return ""
@@ -357,117 +170,3 @@ func ucFirst(s string) string {
r[0] = unicode.ToUpper(r[0])
return string(r)
}
-
-// copy from https://github.com/golang/lint/blob/06c8688daad7faa9da5a0c2f163a3d14aac986ca/lint.go#L679
-
-// lintName returns a different name if it should be different.
-func lintName(name string) (should string) {
- // Fast path for simple cases: "_" and all lowercase.
- if name == "_" {
- return name
- }
- allLower := true
- for _, r := range name {
- if !unicode.IsLower(r) {
- allLower = false
- break
- }
- }
- if allLower {
- return name
- }
-
- // Split camelCase at any lower->upper transition, and split on underscores.
- // Check each word for common initialisms.
- runes := []rune(name)
- w, i := 0, 0 // index of start of word, scan
- for i+1 <= len(runes) {
- eow := false // whether we hit the end of a word
- if i+1 == len(runes) {
- eow = true
- } else if runes[i+1] == '_' {
- // underscore; shift the remainder forward over any run of underscores
- eow = true
- n := 1
- for i+n+1 < len(runes) && runes[i+n+1] == '_' {
- n++
- }
-
- // Leave at most one underscore if the underscore is between two digits
- if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) {
- n--
- }
-
- copy(runes[i+1:], runes[i+n+1:])
- runes = runes[:len(runes)-n]
- } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) {
- // lower->non-lower
- eow = true
- }
- i++
- if !eow {
- continue
- }
-
- // [w,i) is a word.
- word := string(runes[w:i])
- if u := strings.ToUpper(word); commonInitialisms[u] {
- // Keep consistent case, which is lowercase only at the start.
- if w == 0 && unicode.IsLower(runes[w]) {
- u = strings.ToLower(u)
- }
- // All the common initialisms are ASCII,
- // so we can replace the bytes exactly.
- copy(runes[w:], []rune(u))
- } else if w > 0 && strings.ToLower(word) == word {
- // already all lowercase, and not the first word, so uppercase the first character.
- runes[w] = unicode.ToUpper(runes[w])
- }
- w = i
- }
- return string(runes)
-}
-
-// commonInitialisms is a set of common initialisms.
-// Only add entries that are highly unlikely to be non-initialisms.
-// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
-var commonInitialisms = map[string]bool{
- "ACL": true,
- "API": true,
- "ASCII": true,
- "CPU": true,
- "CSS": true,
- "DNS": true,
- "EOF": true,
- "GUID": true,
- "HTML": true,
- "HTTP": true,
- "HTTPS": true,
- "ID": true,
- "IP": true,
- "JSON": true,
- "LHS": true,
- "QPS": true,
- "RAM": true,
- "RHS": true,
- "RPC": true,
- "SLA": true,
- "SMTP": true,
- "SQL": true,
- "SSH": true,
- "TCP": true,
- "TLS": true,
- "TTL": true,
- "UDP": true,
- "UI": true,
- "UID": true,
- "UUID": true,
- "URI": true,
- "URL": true,
- "UTF8": true,
- "VM": true,
- "XML": true,
- "XMPP": true,
- "XSRF": true,
- "XSS": true,
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/object.gotpl b/vendor/github.com/99designs/gqlgen/codegen/object.gotpl
new file mode 100644
index 00000000..19da1b19
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/object.gotpl
@@ -0,0 +1,77 @@
+{{- range $object := .Objects }}
+
+var {{ $object.Name|lcFirst}}Implementors = {{$object.Implementors}}
+
+{{- if .Stream }}
+func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.SelectionSet) func() graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, {{$object.Name|lcFirst}}Implementors)
+ ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ Object: {{$object.Name|quote}},
+ })
+ if len(fields) != 1 {
+ ec.Errorf(ctx, "must subscribe to exactly one stream")
+ return nil
+ }
+
+ switch fields[0].Name {
+ {{- range $field := $object.Fields }}
+ case "{{$field.Name}}":
+ return ec._{{$object.Name}}_{{$field.Name}}(ctx, fields[0])
+ {{- end }}
+ default:
+ panic("unknown field " + strconv.Quote(fields[0].Name))
+ }
+}
+{{- else }}
+func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.SelectionSet{{ if not $object.Root }},obj {{$object.Reference | ref }}{{ end }}) graphql.Marshaler {
+ fields := graphql.CollectFields(ctx, sel, {{$object.Name|lcFirst}}Implementors)
+ {{if $object.Root}}
+ ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
+ Object: {{$object.Name|quote}},
+ })
+ {{end}}
+
+ out := graphql.NewFieldSet(fields)
+ invalid := false
+ for i, field := range fields {
+ switch field.Name {
+ case "__typename":
+ out.Values[i] = graphql.MarshalString({{$object.Name|quote}})
+ {{- range $field := $object.Fields }}
+ case "{{$field.Name}}":
+ {{- if $field.IsConcurrent }}
+ field := field
+ out.Concurrently(i, func() (res graphql.Marshaler) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ }
+ }()
+ res = ec._{{$object.Name}}_{{$field.Name}}(ctx, field{{if not $object.Root}}, obj{{end}})
+ {{- if $field.TypeReference.GQL.NonNull }}
+ if res == graphql.Null {
+ invalid = true
+ }
+ {{- end }}
+ return res
+ })
+ {{- else }}
+ out.Values[i] = ec._{{$object.Name}}_{{$field.Name}}(ctx, field{{if not $object.Root}}, obj{{end}})
+ {{- if $field.TypeReference.GQL.NonNull }}
+ if out.Values[i] == graphql.Null {
+ invalid = true
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ default:
+ panic("unknown field " + strconv.Quote(field.Name))
+ }
+ }
+ out.Dispatch()
+ if invalid { return graphql.Null }
+ return out
+}
+{{- end }}
+
+{{- end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/object_build.go b/vendor/github.com/99designs/gqlgen/codegen/object_build.go
deleted file mode 100644
index 279d1eb6..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/object_build.go
+++ /dev/null
@@ -1,181 +0,0 @@
-package codegen
-
-import (
- "log"
- "sort"
-
- "github.com/pkg/errors"
- "github.com/vektah/gqlparser/ast"
- "golang.org/x/tools/go/loader"
-)
-
-func (cfg *Config) buildObjects(types NamedTypes, prog *loader.Program) (Objects, error) {
- var objects Objects
-
- for _, typ := range cfg.schema.Types {
- if typ.Kind != ast.Object {
- continue
- }
-
- obj, err := cfg.buildObject(types, typ)
- if err != nil {
- return nil, err
- }
-
- def, err := findGoType(prog, obj.Package, obj.GoType)
- if err != nil {
- return nil, err
- }
- if def != nil {
- for _, bindErr := range bindObject(def.Type(), obj, cfg.StructTag) {
- log.Println(bindErr.Error())
- log.Println(" Adding resolver method")
- }
- }
-
- objects = append(objects, obj)
- }
-
- sort.Slice(objects, func(i, j int) bool {
- return objects[i].GQLType < objects[j].GQLType
- })
-
- return objects, nil
-}
-
-var keywords = []string{
- "break",
- "default",
- "func",
- "interface",
- "select",
- "case",
- "defer",
- "go",
- "map",
- "struct",
- "chan",
- "else",
- "goto",
- "package",
- "switch",
- "const",
- "fallthrough",
- "if",
- "range",
- "type",
- "continue",
- "for",
- "import",
- "return",
- "var",
-}
-
-// sanitizeArgName prevents collisions with go keywords for arguments to resolver functions
-func sanitizeArgName(name string) string {
- for _, k := range keywords {
- if name == k {
- return name + "Arg"
- }
- }
- return name
-}
-
-func (cfg *Config) buildObject(types NamedTypes, typ *ast.Definition) (*Object, error) {
- obj := &Object{NamedType: types[typ.Name]}
- typeEntry, entryExists := cfg.Models[typ.Name]
-
- obj.ResolverInterface = &Ref{GoType: obj.GQLType + "Resolver"}
-
- if typ == cfg.schema.Query {
- obj.Root = true
- }
-
- if typ == cfg.schema.Mutation {
- obj.Root = true
- obj.DisableConcurrency = true
- }
-
- if typ == cfg.schema.Subscription {
- obj.Root = true
- obj.Stream = true
- }
-
- obj.Satisfies = append(obj.Satisfies, typ.Interfaces...)
-
- for _, intf := range cfg.schema.GetImplements(typ) {
- obj.Implements = append(obj.Implements, types[intf.Name])
- }
-
- for _, field := range typ.Fields {
- if typ == cfg.schema.Query && field.Name == "__type" {
- obj.Fields = append(obj.Fields, Field{
- Type: &Type{types["__Schema"], []string{modPtr}, ast.NamedType("__Schema", nil), nil},
- GQLName: "__schema",
- GoFieldType: GoFieldMethod,
- GoReceiverName: "ec",
- GoFieldName: "introspectSchema",
- Object: obj,
- Description: field.Description,
- })
- continue
- }
- if typ == cfg.schema.Query && field.Name == "__schema" {
- obj.Fields = append(obj.Fields, Field{
- Type: &Type{types["__Type"], []string{modPtr}, ast.NamedType("__Schema", nil), nil},
- GQLName: "__type",
- GoFieldType: GoFieldMethod,
- GoReceiverName: "ec",
- GoFieldName: "introspectType",
- Args: []FieldArgument{
- {GQLName: "name", Type: &Type{types["String"], []string{}, ast.NamedType("String", nil), nil}, Object: &Object{}},
- },
- Object: obj,
- })
- continue
- }
-
- var forceResolver bool
- var goName string
- if entryExists {
- if typeField, ok := typeEntry.Fields[field.Name]; ok {
- goName = typeField.FieldName
- forceResolver = typeField.Resolver
- }
- }
-
- var args []FieldArgument
- for _, arg := range field.Arguments {
- newArg := FieldArgument{
- GQLName: arg.Name,
- Type: types.getType(arg.Type),
- Object: obj,
- GoVarName: sanitizeArgName(arg.Name),
- }
-
- if !newArg.Type.IsInput && !newArg.Type.IsScalar {
- return nil, errors.Errorf("%s cannot be used as argument of %s.%s. only input and scalar types are allowed", arg.Type, obj.GQLType, field.Name)
- }
-
- if arg.DefaultValue != nil {
- var err error
- newArg.Default, err = arg.DefaultValue.Value(nil)
- if err != nil {
- return nil, errors.Errorf("default value for %s.%s is not valid: %s", typ.Name, field.Name, err.Error())
- }
- }
- args = append(args, newArg)
- }
-
- obj.Fields = append(obj.Fields, Field{
- GQLName: field.Name,
- Type: types.getType(field.Type),
- Args: args,
- Object: obj,
- GoFieldName: goName,
- ForceResolver: forceResolver,
- })
- }
-
- return obj, nil
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/args.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/args.gotpl
deleted file mode 100644
index 870a99ed..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/args.gotpl
+++ /dev/null
@@ -1,13 +0,0 @@
- args := map[string]interface{}{}
- {{- range $i, $arg := . }}
- var arg{{$i}} {{$arg.Signature }}
- if tmp, ok := rawArgs[{{$arg.GQLName|quote}}]; ok {
- var err error
- {{$arg.Unmarshal (print "arg" $i) "tmp" }}
- if err != nil {
- return nil, err
- }
- }
- args[{{$arg.GQLName|quote}}] = arg{{$i}}
- {{- end }}
- return args, nil
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/data.go b/vendor/github.com/99designs/gqlgen/codegen/templates/data.go
deleted file mode 100644
index d3098aaa..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/data.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package templates
-
-var data = map[string]string{
- "args.gotpl": "\targs := map[string]interface{}{}\n\t{{- range $i, $arg := . }}\n\t\tvar arg{{$i}} {{$arg.Signature }}\n\t\tif tmp, ok := rawArgs[{{$arg.GQLName|quote}}]; ok {\n\t\t\tvar err error\n\t\t\t{{$arg.Unmarshal (print \"arg\" $i) \"tmp\" }}\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\targs[{{$arg.GQLName|quote}}] = arg{{$i}}\n\t{{- end }}\n\treturn args, nil\n",
- "field.gotpl": "{{ $field := . }}\n{{ $object := $field.Object }}\n\n{{- if $object.Stream }}\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {\n\t\t{{- if $field.Args }}\n\t\t\trawArgs := field.ArgumentMap(ec.Variables)\n\t\t\targs, err := {{ $field.ArgsFunc }}(rawArgs)\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\treturn nil\n\t\t\t}\n\t\t{{- end }}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\tField: field,\n\t\t})\n\t\t// FIXME: subscriptions are missing request middleware stack https://github.com/99designs/gqlgen/issues/259\n\t\t// and Tracer stack\n\t\trctx := ctx\n\t\tresults, err := ec.resolvers.{{ $field.ShortInvocation }}\n\t\tif err != nil {\n\t\t\tec.Error(ctx, err)\n\t\t\treturn nil\n\t\t}\n\t\treturn func() graphql.Marshaler {\n\t\t\tres, ok := <-results\n\t\t\tif !ok {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tvar out graphql.OrderedMap\n\t\t\tout.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())\n\t\t\treturn &out\n\t\t}\n\t}\n{{ else }}\n\t// nolint: vetshadow\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {\n\t\tctx = ec.Tracer.StartFieldExecution(ctx, field)\n\t\tdefer func () { ec.Tracer.EndFieldExecution(ctx) }()\n\t\t{{- if $field.Args }}\n\t\t\trawArgs := field.ArgumentMap(ec.Variables)\n\t\t\targs, err := {{ $field.ArgsFunc }}(rawArgs)\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\treturn graphql.Null\n\t\t\t}\n\t\t{{- end }}\n\t\trctx := &graphql.ResolverContext{\n\t\t\tObject: {{$object.GQLType|quote}},\n\t\t\tArgs: {{if $field.Args }}args{{else}}nil{{end}},\n\t\t\tField: field,\n\t\t}\n\t\tctx = graphql.WithResolverContext(ctx, rctx)\n\t\tctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)\n\t\tresTmp := ec.FieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, func(rctx context.Context) (interface{}, error) {\n\t\t\tctx = rctx // use context from middleware stack in children\n\t\t\t{{- if $field.IsResolver }}\n\t\t\t\treturn ec.resolvers.{{ $field.ShortInvocation }}\n\t\t\t{{- else if $field.IsMethod }}\n\t\t\t\t{{- if $field.NoErr }}\n\t\t\t\t\treturn {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }}), nil\n\t\t\t\t{{- else }}\n\t\t\t\t\treturn {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }})\n\t\t\t\t{{- end }}\n\t\t\t{{- else if $field.IsVariable }}\n\t\t\t\treturn {{$field.GoReceiverName}}.{{$field.GoFieldName}}, nil\n\t\t\t{{- end }}\n\t\t})\n\t\tif resTmp == nil {\n\t\t\t{{- if $field.ASTType.NonNull }}\n\t\t\t\tif !ec.HasError(rctx) {\n\t\t\t\t\tec.Errorf(ctx, \"must not be null\")\n\t\t\t\t}\n\t\t\t{{- end }}\n\t\t\treturn graphql.Null\n\t\t}\n\t\tres := resTmp.({{$field.Signature}})\n\t\trctx.Result = res\n\t\tctx = ec.Tracer.StartFieldChildExecution(ctx)\n\t\t{{ $field.WriteJson }}\n\t}\n{{ end }}\n",
- "generated.gotpl": "// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n\t%%%IMPORTS%%%\n\n\t{{ reserveImport \"context\" }}\n\t{{ reserveImport \"fmt\" }}\n\t{{ reserveImport \"io\" }}\n\t{{ reserveImport \"strconv\" }}\n\t{{ reserveImport \"time\" }}\n\t{{ reserveImport \"sync\" }}\n\t{{ reserveImport \"errors\" }}\n\t{{ reserveImport \"bytes\" }}\n\n\t{{ reserveImport \"github.com/vektah/gqlparser\" }}\n\t{{ reserveImport \"github.com/vektah/gqlparser/ast\" }}\n\t{{ reserveImport \"github.com/99designs/gqlgen/graphql\" }}\n\t{{ reserveImport \"github.com/99designs/gqlgen/graphql/introspection\" }}\n)\n\n// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.\nfunc NewExecutableSchema(cfg Config) graphql.ExecutableSchema {\n\treturn &executableSchema{\n\t\tresolvers: cfg.Resolvers,\n\t\tdirectives: cfg.Directives,\n\t\tcomplexity: cfg.Complexity,\n\t}\n}\n\ntype Config struct {\n\tResolvers ResolverRoot\n\tDirectives DirectiveRoot\n\tComplexity ComplexityRoot\n}\n\ntype ResolverRoot interface {\n{{- range $object := .Objects -}}\n\t{{ if $object.HasResolvers -}}\n\t\t{{$object.GQLType}}() {{$object.GQLType}}Resolver\n\t{{ end }}\n{{- end }}\n}\n\ntype DirectiveRoot struct {\n{{ range $directive := .Directives }}\n\t{{ $directive.Declaration }}\n{{ end }}\n}\n\ntype ComplexityRoot struct {\n{{ range $object := .Objects }}\n\t{{ if not $object.IsReserved -}}\n\t\t{{ $object.GQLType|toCamel }} struct {\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{ if not $field.IsReserved -}}\n\t\t\t\t{{ $field.GQLName|toCamel }} {{ $field.ComplexitySignature }}\n\t\t\t{{ end }}\n\t\t{{- end }}\n\t\t}\n\t{{- end }}\n{{ end }}\n}\n\n{{ range $object := .Objects -}}\n\t{{ if $object.HasResolvers }}\n\t\ttype {{$object.GQLType}}Resolver interface {\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{ $field.ShortResolverDeclaration }}\n\t\t{{ end }}\n\t\t}\n\t{{- end }}\n{{- end }}\n\n{{ range $object := .Objects -}}\n\t{{ range $field := $object.Fields -}}\n\t\t{{ if $field.Args }}\n\t\t\tfunc {{ $field.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {\n\t\t\t{{ template \"args.gotpl\" $field.Args }}\n\t\t\t}\n\t\t{{ end }}\n\t{{ end }}\n{{- end }}\n\n{{ range $directive := .Directives }}\n\t{{ if $directive.Args }}\n\t\tfunc {{ $directive.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {\n\t\t{{ template \"args.gotpl\" $directive.Args }}\n\t\t}\n\t{{ end }}\n{{ end }}\n\ntype executableSchema struct {\n\tresolvers ResolverRoot\n\tdirectives DirectiveRoot\n\tcomplexity ComplexityRoot\n}\n\nfunc (e *executableSchema) Schema() *ast.Schema {\n\treturn parsedSchema\n}\n\nfunc (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {\n\tswitch typeName + \".\" + field {\n\t{{ range $object := .Objects }}\n\t\t{{ if not $object.IsReserved }}\n\t\t\t{{ range $field := $object.Fields }}\n\t\t\t\t{{ if not $field.IsReserved }}\n\t\t\t\t\tcase \"{{$object.GQLType}}.{{$field.GQLName}}\":\n\t\t\t\t\t\tif e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}} == nil {\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{{ if $field.Args }}\n\t\t\t\t\t\t\targs, err := {{ $field.ArgsFunc }}(rawArgs)\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\treturn 0, false\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t{{ end }}\n\t\t\t\t\t\treturn 
e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{end}}), true\n\t\t\t\t{{ end }}\n\t\t\t{{ end }}\n\t\t{{ end }}\n\t{{ end }}\n\t}\n\treturn 0, false\n}\n\nfunc (e *executableSchema) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {\n\t{{- if .QueryRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.QueryRoot.GQLType}}(ctx, op.SelectionSet)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t\tExtensions: ec.Extensions,\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"queries are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {\n\t{{- if .MutationRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.MutationRoot.GQLType}}(ctx, op.SelectionSet)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t\tExtensions: ec.Extensions,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"mutations are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {\n\t{{- if .SubscriptionRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e}\n\n\t\tnext := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.SelectionSet)\n\t\tif ec.Errors != nil {\n\t\t\treturn graphql.OneShot(&graphql.Response{Data: []byte(\"null\"), Errors: ec.Errors})\n\t\t}\n\n\t\tvar buf bytes.Buffer\n\t\treturn func() *graphql.Response {\n\t\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\t\tbuf.Reset()\n\t\t\t\tdata := next()\n\n\t\t\t\tif data == nil {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\tdata.MarshalGQL(&buf)\n\t\t\t\treturn buf.Bytes()\n\t\t\t})\n\n\t\t\tif buf == nil {\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\treturn &graphql.Response{\n\t\t\t\tData: buf,\n\t\t\t\tErrors: ec.Errors,\n\t\t\t\tExtensions: ec.Extensions,\n\t\t\t}\n\t\t}\n\t{{- else }}\n\t\treturn graphql.OneShot(graphql.ErrorResponse(ctx, \"subscriptions are not supported\"))\n\t{{- end }}\n}\n\ntype executionContext struct {\n\t*graphql.RequestContext\n\t*executableSchema\n}\n\n{{- range $object := .Objects }}\n\t{{ template \"object.gotpl\" $object }}\n\n\t{{- range $field := $object.Fields }}\n\t\t{{ template \"field.gotpl\" $field }}\n\t{{ end }}\n{{- end}}\n\n{{- range $interface := .Interfaces }}\n\t{{ template \"interface.gotpl\" $interface }}\n{{- end }}\n\n{{- range $input := .Inputs }}\n\t{{ template \"input.gotpl\" $input }}\n{{- end }}\n\nfunc (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}, next graphql.Resolver) (ret interface{}) {\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\tec.Error(ctx, ec.Recover(ctx, r))\n\t\t\tret = nil\n\t\t}\n\t}()\n\t{{- if .Directives }}\n\trctx := graphql.GetResolverContext(ctx)\n\tfor _, d := range rctx.Field.Definition.Directives {\n\t\tswitch d.Name {\n\t\t{{- range $directive := .Directives }}\n\t\tcase \"{{$directive.Name}}\":\n\t\t\tif 
ec.directives.{{$directive.Name|ucFirst}} != nil {\n\t\t\t\t{{- if $directive.Args }}\n\t\t\t\t\trawArgs := d.ArgumentMap(ec.Variables)\n\t\t\t\t\targs, err := {{ $directive.ArgsFunc }}(rawArgs)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tec.Error(ctx, err)\n\t\t\t\t\t\treturn nil\n\t\t\t\t\t}\n\t\t\t\t{{- end }}\n\t\t\t\tn := next\n\t\t\t\tnext = func(ctx context.Context) (interface{}, error) {\n\t\t\t\t\treturn ec.directives.{{$directive.Name|ucFirst}}({{$directive.CallArgs}})\n\t\t\t\t}\n\t\t\t}\n\t\t{{- end }}\n\t\t}\n\t}\n\t{{- end }}\n\tres, err := ec.ResolverMiddleware(ctx, next)\n\tif err != nil {\n\t\tec.Error(ctx, err)\n\t\treturn nil\n\t}\n\treturn res\n}\n\nfunc (ec *executionContext) introspectSchema() (*introspection.Schema, error) {\n\tif ec.DisableIntrospection {\n\t\treturn nil, errors.New(\"introspection disabled\")\n\t}\n\treturn introspection.WrapSchema(parsedSchema), nil\n}\n\nfunc (ec *executionContext) introspectType(name string) (*introspection.Type, error) {\n\tif ec.DisableIntrospection {\n\t\treturn nil, errors.New(\"introspection disabled\")\n\t}\n\treturn introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil\n}\n\nvar parsedSchema = gqlparser.MustLoadSchema(\n\t{{- range $filename, $schema := .SchemaRaw }}\n\t\t&ast.Source{Name: {{$filename|quote}}, Input: {{$schema|rawQuote}}},\n\t{{- end }}\n)\n",
- "input.gotpl": "\t{{- if .IsMarshaled }}\n\tfunc Unmarshal{{ .GQLType }}(v interface{}) ({{.FullName}}, error) {\n\t\tvar it {{.FullName}}\n\t\tvar asMap = v.(map[string]interface{})\n\t\t{{ range $field := .Fields}}\n\t\t\t{{- if $field.Default}}\n\t\t\t\tif _, present := asMap[{{$field.GQLName|quote}}] ; !present {\n\t\t\t\t\tasMap[{{$field.GQLName|quote}}] = {{ $field.Default | dump }}\n\t\t\t\t}\n\t\t\t{{- end}}\n\t\t{{- end }}\n\n\t\tfor k, v := range asMap {\n\t\t\tswitch k {\n\t\t\t{{- range $field := .Fields }}\n\t\t\tcase {{$field.GQLName|quote}}:\n\t\t\t\tvar err error\n\t\t\t\t{{ $field.Unmarshal (print \"it.\" $field.GoFieldName) \"v\" }}\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn it, err\n\t\t\t\t}\n\t\t\t{{- end }}\n\t\t\t}\n\t\t}\n\n\t\treturn it, nil\n\t}\n\t{{- end }}\n",
- "interface.gotpl": "{{- $interface := . }}\n\nfunc (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel ast.SelectionSet, obj *{{$interface.FullName}}) graphql.Marshaler {\n\tswitch obj := (*obj).(type) {\n\tcase nil:\n\t\treturn graphql.Null\n\t{{- range $implementor := $interface.Implementors }}\n\t\t{{- if $implementor.ValueReceiver }}\n\t\t\tcase {{$implementor.FullName}}:\n\t\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, &obj)\n\t\t{{- end}}\n\t\tcase *{{$implementor.FullName}}:\n\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, obj)\n\t{{- end }}\n\tdefault:\n\t\tpanic(fmt.Errorf(\"unexpected type %T\", obj))\n\t}\n}\n",
- "models.gotpl": "// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n\t%%%IMPORTS%%%\n\n\t{{ reserveImport \"context\" }}\n\t{{ reserveImport \"fmt\" }}\n\t{{ reserveImport \"io\" }}\n\t{{ reserveImport \"strconv\" }}\n\t{{ reserveImport \"time\" }}\n\t{{ reserveImport \"sync\" }}\n\t{{ reserveImport \"errors\" }}\n\t{{ reserveImport \"bytes\" }}\n\n\t{{ reserveImport \"github.com/vektah/gqlparser\" }}\n\t{{ reserveImport \"github.com/vektah/gqlparser/ast\" }}\n\t{{ reserveImport \"github.com/99designs/gqlgen/graphql\" }}\n\t{{ reserveImport \"github.com/99designs/gqlgen/graphql/introspection\" }}\n)\n\n{{ range $model := .Models }}\n\t{{with .Description}} {{.|prefixLines \"// \"}} {{end}}\n\t{{- if .IsInterface }}\n\t\ttype {{.GoType}} interface {\n\t\t\tIs{{.GoType}}()\n\t\t}\n\t{{- else }}\n\t\ttype {{.GoType}} struct {\n\t\t\t{{- range $field := .Fields }}\n\t\t\t\t{{- with .Description}}\n\t\t\t\t\t{{.|prefixLines \"// \"}}\n\t\t\t\t{{- end}}\n\t\t\t\t{{- if $field.GoFieldName }}\n\t\t\t\t\t{{ $field.GoFieldName }} {{$field.Signature}} `json:\"{{$field.GQLName}}\"`\n\t\t\t\t{{- else }}\n\t\t\t\t\t{{ $field.GoFKName }} {{$field.GoFKType}}\n\t\t\t\t{{- end }}\n\t\t\t{{- end }}\n\t\t}\n\n\t\t{{- range $iface := .Implements }}\n\t\t\tfunc ({{$model.GoType}}) Is{{$iface.GoType}}() {}\n\t\t{{- end }}\n\n\t{{- end }}\n{{- end}}\n\n{{ range $enum := .Enums }}\n\t{{with .Description}}{{.|prefixLines \"// \"}} {{end}}\n\ttype {{.GoType}} string\n\tconst (\n\t{{- range $value := .Values}}\n\t\t{{- with .Description}}\n\t\t\t{{.|prefixLines \"// \"}}\n\t\t{{- end}}\n\t\t{{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}\n\t{{- end }}\n\t)\n\n\tfunc (e {{.GoType}}) IsValid() bool {\n\t\tswitch e {\n\t\tcase {{ range $index, $element := .Values}}{{if $index}},{{end}}{{ $enum.GoType }}{{ $element.Name|toCamel }}{{end}}:\n\t\t\treturn true\n\t\t}\n\t\treturn false\n\t}\n\n\tfunc (e {{.GoType}}) String() string {\n\t\treturn string(e)\n\t}\n\n\tfunc (e *{{.GoType}}) UnmarshalGQL(v interface{}) error {\n\t\tstr, ok := v.(string)\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"enums must be strings\")\n\t\t}\n\n\t\t*e = {{.GoType}}(str)\n\t\tif !e.IsValid() {\n\t\t\treturn fmt.Errorf(\"%s is not a valid {{.GQLType}}\", str)\n\t\t}\n\t\treturn nil\n\t}\n\n\tfunc (e {{.GoType}}) MarshalGQL(w io.Writer) {\n\t\tfmt.Fprint(w, strconv.Quote(e.String()))\n\t}\n\n{{- end }}\n",
- "object.gotpl": "{{ $object := . }}\n\nvar {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}\n\n// nolint: gocyclo, errcheck, gas, goconst\n{{- if .Stream }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet) func() graphql.Marshaler {\n\tfields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)\n\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\tObject: {{$object.GQLType|quote}},\n\t})\n\tif len(fields) != 1 {\n\t\tec.Errorf(ctx, \"must subscribe to exactly one stream\")\n\t\treturn nil\n\t}\n\n\tswitch fields[0].Name {\n\t{{- range $field := $object.Fields }}\n\tcase \"{{$field.GQLName}}\":\n\t\treturn ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, fields[0])\n\t{{- end }}\n\tdefault:\n\t\tpanic(\"unknown field \" + strconv.Quote(fields[0].Name))\n\t}\n}\n{{- else }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {\n\tfields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)\n\t{{if $object.Root}}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\tObject: {{$object.GQLType|quote}},\n\t\t})\n\t{{end}}\n\n\t{{if $object.IsConcurrent}} var wg sync.WaitGroup {{end}}\n\tout := graphql.NewOrderedMap(len(fields))\n\tinvalid := false\n\tfor i, field := range fields {\n\t\tout.Keys[i] = field.Alias\n\n\t\tswitch field.Name {\n\t\tcase \"__typename\":\n\t\t\tout.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})\n\t\t{{- range $field := $object.Fields }}\n\t\tcase \"{{$field.GQLName}}\":\n\t\t\t{{- if $field.IsConcurrent }}\n\t\t\t\twg.Add(1)\n\t\t\t\tgo func(i int, field graphql.CollectedField) {\n\t\t\t{{- end }}\n\t\t\t\tout.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})\n\t\t\t\t{{- if $field.ASTType.NonNull }}\n\t\t\t\t\tif out.Values[i] == graphql.Null {\n\t\t\t\t\t\tinvalid = true\n\t\t\t\t\t}\n\t\t\t\t{{- end }}\n\t\t\t{{- if $field.IsConcurrent }}\n\t\t\t\t\twg.Done()\n\t\t\t\t}(i, field)\n\t\t\t{{- end }}\n\t\t{{- end }}\n\t\tdefault:\n\t\t\tpanic(\"unknown field \" + strconv.Quote(field.Name))\n\t\t}\n\t}\n\t{{if $object.IsConcurrent}} wg.Wait() {{end}}\n\tif invalid { return graphql.Null }\n\treturn out\n}\n{{- end }}\n",
- "resolver.gotpl": "package {{ .PackageName }}\n\nimport (\n\t%%%IMPORTS%%%\n\n\t{{ reserveImport \"context\" }}\n\t{{ reserveImport \"fmt\" }}\n\t{{ reserveImport \"io\" }}\n\t{{ reserveImport \"strconv\" }}\n\t{{ reserveImport \"time\" }}\n\t{{ reserveImport \"sync\" }}\n\t{{ reserveImport \"errors\" }}\n\t{{ reserveImport \"bytes\" }}\n\n\t{{ reserveImport \"github.com/99designs/gqlgen/handler\" }}\n\t{{ reserveImport \"github.com/vektah/gqlparser\" }}\n\t{{ reserveImport \"github.com/vektah/gqlparser/ast\" }}\n\t{{ reserveImport \"github.com/99designs/gqlgen/graphql\" }}\n\t{{ reserveImport \"github.com/99designs/gqlgen/graphql/introspection\" }}\n)\n\ntype {{.ResolverType}} struct {}\n\n{{ range $object := .Objects -}}\n\t{{- if $object.HasResolvers -}}\n\t\tfunc (r *{{$.ResolverType}}) {{$object.GQLType}}() {{ $object.ResolverInterface.FullName }} {\n\t\t\treturn &{{lcFirst $object.GQLType}}Resolver{r}\n\t\t}\n\t{{ end -}}\n{{ end }}\n\n{{ range $object := .Objects -}}\n\t{{- if $object.HasResolvers -}}\n\t\ttype {{lcFirst $object.GQLType}}Resolver struct { *Resolver }\n\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{- if $field.IsResolver -}}\n\t\t\tfunc (r *{{lcFirst $object.GQLType}}Resolver) {{ $field.ShortResolverDeclaration }} {\n\t\t\t\tpanic(\"not implemented\")\n\t\t\t}\n\t\t\t{{ end -}}\n\t\t{{ end -}}\n\t{{ end -}}\n{{ end }}\n",
- "server.gotpl": "package main\n\nimport (\n\t%%%IMPORTS%%%\n\n\t{{ reserveImport \"context\" }}\n\t{{ reserveImport \"log\" }}\n\t{{ reserveImport \"net/http\" }}\n\t{{ reserveImport \"os\" }}\n\t{{ reserveImport \"github.com/99designs/gqlgen/handler\" }}\n)\n\nconst defaultPort = \"8080\"\n\nfunc main() {\n\tport := os.Getenv(\"PORT\")\n\tif port == \"\" {\n\t\tport = defaultPort\n\t}\n\n\thttp.Handle(\"/\", handler.Playground(\"GraphQL playground\", \"/query\"))\n\thttp.Handle(\"/query\", handler.GraphQL({{ lookupImport .ExecPackageName }}.NewExecutableSchema({{ lookupImport .ExecPackageName}}.Config{Resolvers: &{{ lookupImport .ResolverPackageName}}.Resolver{}})))\n\n\tlog.Printf(\"connect to http://localhost:%s/ for GraphQL playground\", port)\n\tlog.Fatal(http.ListenAndServe(\":\" + port, nil))\n}\n",
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/import.go b/vendor/github.com/99designs/gqlgen/codegen/templates/import.go
index c9db2d96..effe9a0d 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/import.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/import.go
@@ -2,10 +2,10 @@ package templates
import (
"fmt"
- "go/build"
+ "go/types"
"strconv"
- "github.com/99designs/gqlgen/internal/gopath"
+ "github.com/99designs/gqlgen/internal/code"
)
type Import struct {
@@ -38,43 +38,42 @@ func (s *Imports) String() string {
return res
}
-func (s *Imports) Reserve(path string, aliases ...string) string {
+func (s *Imports) Reserve(path string, aliases ...string) (string, error) {
if path == "" {
panic("empty ambient import")
}
// if we are referencing our own package we don't need an import
- if gopath.MustDir2Import(s.destDir) == path {
- return ""
- }
-
- pkg, err := build.Default.Import(path, s.destDir, 0)
- if err != nil {
- panic(err)
+ if code.ImportPathForDir(s.destDir) == path {
+ return "", nil
}
+ name := code.NameForPackage(path)
var alias string
if len(aliases) != 1 {
- alias = pkg.Name
+ alias = name
} else {
alias = aliases[0]
}
if existing := s.findByPath(path); existing != nil {
- panic("ambient import already exists")
+ if existing.Alias == alias {
+ return "", nil
+ }
+ return "", fmt.Errorf("ambient import already exists")
}
if alias := s.findByAlias(alias); alias != nil {
- panic("ambient import collides on an alias")
+ return "", fmt.Errorf("ambient import collides on an alias")
}
s.imports = append(s.imports, &Import{
- Name: pkg.Name,
+ Name: name,
Path: path,
Alias: alias,
})
- return ""
+ return "", nil
}
func (s *Imports) Lookup(path string) string {
@@ -82,8 +81,10 @@ func (s *Imports) Lookup(path string) string {
return ""
}
+ path = code.NormalizeVendor(path)
+
// if we are referencing our own package we don't need an import
- if gopath.MustDir2Import(s.destDir) == path {
+ if code.ImportPathForDir(s.destDir) == path {
return ""
}
@@ -91,13 +92,8 @@ func (s *Imports) Lookup(path string) string {
return existing.Alias
}
- pkg, err := build.Default.Import(path, s.destDir, 0)
- if err != nil {
- panic(err)
- }
-
imp := &Import{
- Name: pkg.Name,
+ Name: code.NameForPackage(path),
Path: path,
}
s.imports = append(s.imports, imp)
@@ -116,6 +112,12 @@ func (s *Imports) Lookup(path string) string {
return imp.Alias
}
+func (s *Imports) LookupType(t types.Type) string {
+ return types.TypeString(t, func(i *types.Package) string {
+ return s.Lookup(i.Path())
+ })
+}
+
func (s Imports) findByPath(importPath string) *Import {
for _, imp := range s.imports {
if imp.Path == importPath {
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/input.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/input.gotpl
deleted file mode 100644
index f543608d..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/input.gotpl
+++ /dev/null
@@ -1,28 +0,0 @@
- {{- if .IsMarshaled }}
- func Unmarshal{{ .GQLType }}(v interface{}) ({{.FullName}}, error) {
- var it {{.FullName}}
- var asMap = v.(map[string]interface{})
- {{ range $field := .Fields}}
- {{- if $field.Default}}
- if _, present := asMap[{{$field.GQLName|quote}}] ; !present {
- asMap[{{$field.GQLName|quote}}] = {{ $field.Default | dump }}
- }
- {{- end}}
- {{- end }}
-
- for k, v := range asMap {
- switch k {
- {{- range $field := .Fields }}
- case {{$field.GQLName|quote}}:
- var err error
- {{ $field.Unmarshal (print "it." $field.GoFieldName) "v" }}
- if err != nil {
- return it, err
- }
- {{- end }}
- }
- }
-
- return it, nil
- }
- {{- end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/interface.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/interface.gotpl
deleted file mode 100644
index 84cbe500..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/interface.gotpl
+++ /dev/null
@@ -1,18 +0,0 @@
-{{- $interface := . }}
-
-func (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel ast.SelectionSet, obj *{{$interface.FullName}}) graphql.Marshaler {
- switch obj := (*obj).(type) {
- case nil:
- return graphql.Null
- {{- range $implementor := $interface.Implementors }}
- {{- if $implementor.ValueReceiver }}
- case {{$implementor.FullName}}:
- return ec._{{$implementor.GQLType}}(ctx, sel, &obj)
- {{- end}}
- case *{{$implementor.FullName}}:
- return ec._{{$implementor.GQLType}}(ctx, sel, obj)
- {{- end }}
- default:
- panic(fmt.Errorf("unexpected type %T", obj))
- }
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/models.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/models.gotpl
deleted file mode 100644
index db63a996..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/models.gotpl
+++ /dev/null
@@ -1,91 +0,0 @@
-// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
-
-package {{ .PackageName }}
-
-import (
- %%%IMPORTS%%%
-
- {{ reserveImport "context" }}
- {{ reserveImport "fmt" }}
- {{ reserveImport "io" }}
- {{ reserveImport "strconv" }}
- {{ reserveImport "time" }}
- {{ reserveImport "sync" }}
- {{ reserveImport "errors" }}
- {{ reserveImport "bytes" }}
-
- {{ reserveImport "github.com/vektah/gqlparser" }}
- {{ reserveImport "github.com/vektah/gqlparser/ast" }}
- {{ reserveImport "github.com/99designs/gqlgen/graphql" }}
- {{ reserveImport "github.com/99designs/gqlgen/graphql/introspection" }}
-)
-
-{{ range $model := .Models }}
- {{with .Description}} {{.|prefixLines "// "}} {{end}}
- {{- if .IsInterface }}
- type {{.GoType}} interface {
- Is{{.GoType}}()
- }
- {{- else }}
- type {{.GoType}} struct {
- {{- range $field := .Fields }}
- {{- with .Description}}
- {{.|prefixLines "// "}}
- {{- end}}
- {{- if $field.GoFieldName }}
- {{ $field.GoFieldName }} {{$field.Signature}} `json:"{{$field.GQLName}}"`
- {{- else }}
- {{ $field.GoFKName }} {{$field.GoFKType}}
- {{- end }}
- {{- end }}
- }
-
- {{- range $iface := .Implements }}
- func ({{$model.GoType}}) Is{{$iface.GoType}}() {}
- {{- end }}
-
- {{- end }}
-{{- end}}
-
-{{ range $enum := .Enums }}
- {{with .Description}}{{.|prefixLines "// "}} {{end}}
- type {{.GoType}} string
- const (
- {{- range $value := .Values}}
- {{- with .Description}}
- {{.|prefixLines "// "}}
- {{- end}}
- {{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}
- {{- end }}
- )
-
- func (e {{.GoType}}) IsValid() bool {
- switch e {
- case {{ range $index, $element := .Values}}{{if $index}},{{end}}{{ $enum.GoType }}{{ $element.Name|toCamel }}{{end}}:
- return true
- }
- return false
- }
-
- func (e {{.GoType}}) String() string {
- return string(e)
- }
-
- func (e *{{.GoType}}) UnmarshalGQL(v interface{}) error {
- str, ok := v.(string)
- if !ok {
- return fmt.Errorf("enums must be strings")
- }
-
- *e = {{.GoType}}(str)
- if !e.IsValid() {
- return fmt.Errorf("%s is not a valid {{.GQLType}}", str)
- }
- return nil
- }
-
- func (e {{.GoType}}) MarshalGQL(w io.Writer) {
- fmt.Fprint(w, strconv.Quote(e.String()))
- }
-
-{{- end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/object.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/object.gotpl
deleted file mode 100644
index e98cbe1e..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/object.gotpl
+++ /dev/null
@@ -1,69 +0,0 @@
-{{ $object := . }}
-
-var {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}
-
-// nolint: gocyclo, errcheck, gas, goconst
-{{- if .Stream }}
-func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet) func() graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Object: {{$object.GQLType|quote}},
- })
- if len(fields) != 1 {
- ec.Errorf(ctx, "must subscribe to exactly one stream")
- return nil
- }
-
- switch fields[0].Name {
- {{- range $field := $object.Fields }}
- case "{{$field.GQLName}}":
- return ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, fields[0])
- {{- end }}
- default:
- panic("unknown field " + strconv.Quote(fields[0].Name))
- }
-}
-{{- else }}
-func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {
- fields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)
- {{if $object.Root}}
- ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
- Object: {{$object.GQLType|quote}},
- })
- {{end}}
-
- {{if $object.IsConcurrent}} var wg sync.WaitGroup {{end}}
- out := graphql.NewOrderedMap(len(fields))
- invalid := false
- for i, field := range fields {
- out.Keys[i] = field.Alias
-
- switch field.Name {
- case "__typename":
- out.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})
- {{- range $field := $object.Fields }}
- case "{{$field.GQLName}}":
- {{- if $field.IsConcurrent }}
- wg.Add(1)
- go func(i int, field graphql.CollectedField) {
- {{- end }}
- out.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})
- {{- if $field.ASTType.NonNull }}
- if out.Values[i] == graphql.Null {
- invalid = true
- }
- {{- end }}
- {{- if $field.IsConcurrent }}
- wg.Done()
- }(i, field)
- {{- end }}
- {{- end }}
- default:
- panic("unknown field " + strconv.Quote(field.Name))
- }
- }
- {{if $object.IsConcurrent}} wg.Wait() {{end}}
- if invalid { return graphql.Null }
- return out
-}
-{{- end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/resolver.gotpl b/vendor/github.com/99designs/gqlgen/codegen/templates/resolver.gotpl
deleted file mode 100644
index 53ba8c43..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/resolver.gotpl
+++ /dev/null
@@ -1,44 +0,0 @@
-package {{ .PackageName }}
-
-import (
- %%%IMPORTS%%%
-
- {{ reserveImport "context" }}
- {{ reserveImport "fmt" }}
- {{ reserveImport "io" }}
- {{ reserveImport "strconv" }}
- {{ reserveImport "time" }}
- {{ reserveImport "sync" }}
- {{ reserveImport "errors" }}
- {{ reserveImport "bytes" }}
-
- {{ reserveImport "github.com/99designs/gqlgen/handler" }}
- {{ reserveImport "github.com/vektah/gqlparser" }}
- {{ reserveImport "github.com/vektah/gqlparser/ast" }}
- {{ reserveImport "github.com/99designs/gqlgen/graphql" }}
- {{ reserveImport "github.com/99designs/gqlgen/graphql/introspection" }}
-)
-
-type {{.ResolverType}} struct {}
-
-{{ range $object := .Objects -}}
- {{- if $object.HasResolvers -}}
- func (r *{{$.ResolverType}}) {{$object.GQLType}}() {{ $object.ResolverInterface.FullName }} {
- return &{{lcFirst $object.GQLType}}Resolver{r}
- }
- {{ end -}}
-{{ end }}
-
-{{ range $object := .Objects -}}
- {{- if $object.HasResolvers -}}
- type {{lcFirst $object.GQLType}}Resolver struct { *Resolver }
-
- {{ range $field := $object.Fields -}}
- {{- if $field.IsResolver -}}
- func (r *{{lcFirst $object.GQLType}}Resolver) {{ $field.ShortResolverDeclaration }} {
- panic("not implemented")
- }
- {{ end -}}
- {{ end -}}
- {{ end -}}
-{{ end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/templates.go b/vendor/github.com/99designs/gqlgen/codegen/templates/templates.go
index 22e5d739..4c292732 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/templates.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/templates/templates.go
@@ -1,13 +1,14 @@
-//go:generate go run ./inliner/inliner.go
-
package templates
import (
"bytes"
"fmt"
+ "go/types"
"io/ioutil"
"os"
"path/filepath"
+ "reflect"
+ "runtime"
"sort"
"strconv"
"strings"
@@ -15,40 +16,141 @@ import (
"unicode"
"github.com/99designs/gqlgen/internal/imports"
-
"github.com/pkg/errors"
)
// this is done with a global because subtemplates currently get called in functions. Let's aim to remove this eventually.
var CurrentImports *Imports
-func Run(name string, tpldata interface{}) (*bytes.Buffer, error) {
- t := template.New("").Funcs(template.FuncMap{
- "ucFirst": ucFirst,
- "lcFirst": lcFirst,
- "quote": strconv.Quote,
- "rawQuote": rawQuote,
- "toCamel": ToCamel,
- "dump": dump,
- "prefixLines": prefixLines,
- "reserveImport": CurrentImports.Reserve,
- "lookupImport": CurrentImports.Lookup,
+type Options struct {
+ PackageName string
+ Filename string
+ RegionTags bool
+ GeneratedHeader bool
+ Data interface{}
+ Funcs template.FuncMap
+}
+
+func Render(cfg Options) error {
+ if CurrentImports != nil {
+ panic(fmt.Errorf("recursive or concurrent call to RenderToFile detected"))
+ }
+ CurrentImports = &Imports{destDir: filepath.Dir(cfg.Filename)}
+
+ // load path relative to calling source file
+ _, callerFile, _, _ := runtime.Caller(1)
+ rootDir := filepath.Dir(callerFile)
+
+ funcs := Funcs()
+ for n, f := range cfg.Funcs {
+ funcs[n] = f
+ }
+ t := template.New("").Funcs(funcs)
+
+ var roots []string
+ // load all the templates in the directory
+ err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ name := filepath.ToSlash(strings.TrimPrefix(path, rootDir+string(os.PathSeparator)))
+ if !strings.HasSuffix(info.Name(), ".gotpl") {
+ return nil
+ }
+ b, err := ioutil.ReadFile(path)
+ if err != nil {
+ return err
+ }
+
+ t, err = t.New(name).Parse(string(b))
+ if err != nil {
+ return errors.Wrap(err, cfg.Filename)
+ }
+
+ roots = append(roots, name)
+
+ return nil
})
+ if err != nil {
+ return errors.Wrap(err, "locating templates")
+ }
- for filename, data := range data {
- _, err := t.New(filename).Parse(data)
+ // then execute all the important looking ones in order, adding them to the same file
+ sort.Slice(roots, func(i, j int) bool {
+ // important files go first
+ if strings.HasSuffix(roots[i], "!.gotpl") {
+ return true
+ }
+ if strings.HasSuffix(roots[j], "!.gotpl") {
+ return false
+ }
+ return roots[i] < roots[j]
+ })
+ var buf bytes.Buffer
+ for _, root := range roots {
+ if cfg.RegionTags {
+ buf.WriteString("\n// region " + center(70, "*", " "+root+" ") + "\n")
+ }
+ err = t.Lookup(root).Execute(&buf, cfg.Data)
if err != nil {
- panic(err)
+ return errors.Wrap(err, root)
+ }
+ if cfg.RegionTags {
+ buf.WriteString("\n// endregion " + center(70, "*", " "+root+" ") + "\n")
}
}
- buf := &bytes.Buffer{}
- err := t.Lookup(name).Execute(buf, tpldata)
+ var result bytes.Buffer
+ if cfg.GeneratedHeader {
+ result.WriteString("// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.\n\n")
+ }
+ result.WriteString("package ")
+ result.WriteString(cfg.PackageName)
+ result.WriteString("\n\n")
+ result.WriteString("import (\n")
+ result.WriteString(CurrentImports.String())
+ result.WriteString(")\n")
+ _, err = buf.WriteTo(&result)
if err != nil {
- return nil, err
+ return err
}
+ CurrentImports = nil
- return buf, nil
+ return write(cfg.Filename, result.Bytes())
+}
+
+func center(width int, pad string, s string) string {
+ if len(s)+2 > width {
+ return s
+ }
+ lpad := (width - len(s)) / 2
+ rpad := width - (lpad + len(s))
+ return strings.Repeat(pad, lpad) + s + strings.Repeat(pad, rpad)
+}
+
+func Funcs() template.FuncMap {
+ return template.FuncMap{
+ "ucFirst": ucFirst,
+ "lcFirst": lcFirst,
+ "quote": strconv.Quote,
+ "rawQuote": rawQuote,
+ "dump": Dump,
+ "ref": ref,
+ "ts": TypeIdentifier,
+ "call": Call,
+ "prefixLines": prefixLines,
+ "notNil": notNil,
+ "reserveImport": CurrentImports.Reserve,
+ "lookupImport": CurrentImports.Lookup,
+ "go": ToGo,
+ "goPrivate": ToGoPrivate,
+ "add": func(a, b int) int {
+ return a + b
+ },
+ "render": func(filename string, tpldata interface{}) (*bytes.Buffer, error) {
+ return render(resolveName(filename, 0), tpldata)
+ },
+ }
}
func ucFirst(s string) string {
@@ -74,37 +176,276 @@ func isDelimiter(c rune) bool {
return c == '-' || c == '_' || unicode.IsSpace(c)
}
-func ToCamel(s string) string {
- buffer := make([]rune, 0, len(s))
- upper := true
- lastWasUpper := false
+func ref(p types.Type) string {
+ return CurrentImports.LookupType(p)
+}
- for _, c := range s {
- if isDelimiter(c) {
- upper = true
+var pkgReplacer = strings.NewReplacer(
+ "/", "ᚋ",
+ ".", "ᚗ",
+ "-", "ᚑ",
+)
+
+func TypeIdentifier(t types.Type) string {
+ res := ""
+ for {
+ switch it := t.(type) {
+ case *types.Pointer:
+ t.Underlying()
+ res += "ᚖ"
+ t = it.Elem()
+ case *types.Slice:
+ res += "ᚕ"
+ t = it.Elem()
+ case *types.Named:
+ res += pkgReplacer.Replace(it.Obj().Pkg().Path())
+ res += "ᚐ"
+ res += it.Obj().Name()
+ return res
+ case *types.Basic:
+ res += it.Name()
+ return res
+ case *types.Map:
+ res += "map"
+ return res
+ case *types.Interface:
+ res += "interface"
+ return res
+ default:
+ panic(fmt.Errorf("unexpected type %T", it))
+ }
+ }
+}
+
+func Call(p *types.Func) string {
+ pkg := CurrentImports.Lookup(p.Pkg().Path())
+
+ if pkg != "" {
+ pkg += "."
+ }
+
+ if p.Type() != nil {
+ // make sure the returned type is listed in our imports.
+ ref(p.Type().(*types.Signature).Results().At(0).Type())
+ }
+
+ return pkg + p.Name()
+}
+
+func ToGo(name string) string {
+ runes := make([]rune, 0, len(name))
+
+ wordWalker(name, func(info *wordInfo) {
+ word := info.Word
+ if info.MatchCommonInitial {
+ word = strings.ToUpper(word)
+ } else if !info.HasCommonInitial {
+ if strings.ToUpper(word) == word || strings.ToLower(word) == word {
+ // FOO or foo → Foo
+ // FOo → FOo
+ word = ucFirst(strings.ToLower(word))
+ }
+ }
+ runes = append(runes, []rune(word)...)
+ })
+
+ return string(runes)
+}
+
+func ToGoPrivate(name string) string {
+ runes := make([]rune, 0, len(name))
+
+ first := true
+ wordWalker(name, func(info *wordInfo) {
+ word := info.Word
+ if first {
+ if strings.ToUpper(word) == word || strings.ToLower(word) == word {
+ // ID → id, CAMEL → camel
+ word = strings.ToLower(info.Word)
+ } else {
+ // ITicket → iTicket
+ word = lcFirst(info.Word)
+ }
+ first = false
+ } else if info.MatchCommonInitial {
+ word = strings.ToUpper(word)
+ } else if !info.HasCommonInitial {
+ word = ucFirst(strings.ToLower(word))
+ }
+ runes = append(runes, []rune(word)...)
+ })
+
+ return sanitizeKeywords(string(runes))
+}
+
+type wordInfo struct {
+ Word string
+ MatchCommonInitial bool
+ HasCommonInitial bool
+}
+
+// This function is based on the following code.
+// https://github.com/golang/lint/blob/06c8688daad7faa9da5a0c2f163a3d14aac986ca/lint.go#L679
+func wordWalker(str string, f func(*wordInfo)) {
+ runes := []rune(str)
+ w, i := 0, 0 // index of start of word, scan
+ hasCommonInitial := false
+ for i+1 <= len(runes) {
+ eow := false // whether we hit the end of a word
+ if i+1 == len(runes) {
+ eow = true
+ } else if isDelimiter(runes[i+1]) {
+ // underscore; shift the remainder forward over any run of underscores
+ eow = true
+ n := 1
+ for i+n+1 < len(runes) && isDelimiter(runes[i+n+1]) {
+ n++
+ }
+
+ // Leave at most one underscore if the underscore is between two digits
+ if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) {
+ n--
+ }
+
+ copy(runes[i+1:], runes[i+n+1:])
+ runes = runes[:len(runes)-n]
+ } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) {
+ // lower->non-lower
+ eow = true
+ }
+ i++
+
+ // [w,i) is a word.
+ word := string(runes[w:i])
+ if !eow && commonInitialisms[word] && !unicode.IsLower(runes[i]) {
+ // through
+ // split IDFoo → ID, Foo
+ // but URLs → URLs
+ } else if !eow {
+ if commonInitialisms[word] {
+ hasCommonInitial = true
+ }
continue
}
- if !lastWasUpper && unicode.IsUpper(c) {
- upper = true
+
+ matchCommonInitial := false
+ if commonInitialisms[strings.ToUpper(word)] {
+ hasCommonInitial = true
+ matchCommonInitial = true
}
- if upper {
- buffer = append(buffer, unicode.ToUpper(c))
- } else {
- buffer = append(buffer, unicode.ToLower(c))
+ f(&wordInfo{
+ Word: word,
+ MatchCommonInitial: matchCommonInitial,
+ HasCommonInitial: hasCommonInitial,
+ })
+ hasCommonInitial = false
+ w = i
+ }
+}
+
+var keywords = []string{
+ "break",
+ "default",
+ "func",
+ "interface",
+ "select",
+ "case",
+ "defer",
+ "go",
+ "map",
+ "struct",
+ "chan",
+ "else",
+ "goto",
+ "package",
+ "switch",
+ "const",
+ "fallthrough",
+ "if",
+ "range",
+ "type",
+ "continue",
+ "for",
+ "import",
+ "return",
+ "var",
+ "_",
+}
+
+// sanitizeKeywords prevents collisions with go keywords for arguments to resolver functions
+func sanitizeKeywords(name string) string {
+ for _, k := range keywords {
+ if name == k {
+ return name + "Arg"
}
- upper = false
- lastWasUpper = unicode.IsUpper(c)
}
+ return name
+}
- return string(buffer)
+// commonInitialisms is a set of common initialisms.
+// Only add entries that are highly unlikely to be non-initialisms.
+// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
+var commonInitialisms = map[string]bool{
+ "ACL": true,
+ "API": true,
+ "ASCII": true,
+ "CPU": true,
+ "CSS": true,
+ "DNS": true,
+ "EOF": true,
+ "GUID": true,
+ "HTML": true,
+ "HTTP": true,
+ "HTTPS": true,
+ "ID": true,
+ "IP": true,
+ "JSON": true,
+ "LHS": true,
+ "QPS": true,
+ "RAM": true,
+ "RHS": true,
+ "RPC": true,
+ "SLA": true,
+ "SMTP": true,
+ "SQL": true,
+ "SSH": true,
+ "TCP": true,
+ "TLS": true,
+ "TTL": true,
+ "UDP": true,
+ "UI": true,
+ "UID": true,
+ "UUID": true,
+ "URI": true,
+ "URL": true,
+ "UTF8": true,
+ "VM": true,
+ "XML": true,
+ "XMPP": true,
+ "XSRF": true,
+ "XSS": true,
}
func rawQuote(s string) string {
return "`" + strings.Replace(s, "`", "`+\"`\"+`", -1) + "`"
}
-func dump(val interface{}) string {
+func notNil(field string, data interface{}) bool {
+ v := reflect.ValueOf(data)
+
+ if v.Kind() == reflect.Ptr {
+ v = v.Elem()
+ }
+ if v.Kind() != reflect.Struct {
+ return false
+ }
+ val := v.FieldByName(field)
+
+ return val.IsValid() && !val.IsNil()
+}
+
+func Dump(val interface{}) string {
switch val := val.(type) {
case int:
return strconv.Itoa(val)
@@ -121,7 +462,7 @@ func dump(val interface{}) string {
case []interface{}:
var parts []string
for _, part := range val {
- parts = append(parts, dump(part))
+ parts = append(parts, Dump(part))
}
return "[]interface{}{" + strings.Join(parts, ",") + "}"
case map[string]interface{}:
@@ -138,7 +479,7 @@ func dump(val interface{}) string {
buf.WriteString(strconv.Quote(key))
buf.WriteString(":")
- buf.WriteString(dump(data))
+ buf.WriteString(Dump(data))
buf.WriteString(",")
}
buf.WriteString("}")
@@ -152,22 +493,33 @@ func prefixLines(prefix, s string) string {
return prefix + strings.Replace(s, "\n", "\n"+prefix, -1)
}
-func RenderToFile(tpl string, filename string, data interface{}) error {
- if CurrentImports != nil {
- panic(fmt.Errorf("recursive or concurrent call to RenderToFile detected"))
+func resolveName(name string, skip int) string {
+ if name[0] == '.' {
+ // load path relative to calling source file
+ _, callerFile, _, _ := runtime.Caller(skip + 1)
+ return filepath.Join(filepath.Dir(callerFile), name[1:])
}
- CurrentImports = &Imports{destDir: filepath.Dir(filename)}
- var buf *bytes.Buffer
- buf, err := Run(tpl, data)
+ // load path relative to this directory
+ _, callerFile, _, _ := runtime.Caller(0)
+ return filepath.Join(filepath.Dir(callerFile), name)
+}
+
+func render(filename string, tpldata interface{}) (*bytes.Buffer, error) {
+ t := template.New("").Funcs(Funcs())
+
+ b, err := ioutil.ReadFile(filename)
if err != nil {
- return errors.Wrap(err, filename+" generation failed")
+ return nil, err
}
- b := bytes.Replace(buf.Bytes(), []byte("%%%IMPORTS%%%"), []byte(CurrentImports.String()), -1)
- CurrentImports = nil
+ t, err = t.New(filepath.Base(filename)).Parse(string(b))
+ if err != nil {
+ panic(err)
+ }
- return write(filename, b)
+ buf := &bytes.Buffer{}
+ return buf, t.Execute(buf, tpldata)
}
func write(filename string, b []byte) error {
diff --git a/vendor/github.com/99designs/gqlgen/codegen/type.go b/vendor/github.com/99designs/gqlgen/codegen/type.go
index 04d9bb2f..e0083732 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/type.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/type.go
@@ -1,174 +1,18 @@
package codegen
import (
- "strconv"
- "strings"
-
- "github.com/99designs/gqlgen/codegen/templates"
-
- "github.com/vektah/gqlparser/ast"
-)
-
-type NamedTypes map[string]*NamedType
-
-type NamedType struct {
- Ref
- IsScalar bool
- IsInterface bool
- IsInput bool
- GQLType string // Name of the graphql type
- Marshaler *Ref // If this type has an external marshaler this will be set
-}
-
-type Ref struct {
- GoType string // Name of the go type
- Package string // the package the go type lives in
- IsUserDefined bool // does the type exist in the typemap
-}
-
-type Type struct {
- *NamedType
-
- Modifiers []string
- ASTType *ast.Type
- AliasedType *Ref
-}
-
-const (
- modList = "[]"
- modPtr = "*"
+ "github.com/99designs/gqlgen/codegen/config"
)
-func (t Ref) FullName() string {
- return t.PkgDot() + t.GoType
-}
-
-func (t Ref) PkgDot() string {
- name := templates.CurrentImports.Lookup(t.Package)
- if name == "" {
- return ""
-
- }
-
- return name + "."
-}
-
-func (t Type) Signature() string {
- if t.AliasedType != nil {
- return strings.Join(t.Modifiers, "") + t.AliasedType.FullName()
- }
- return strings.Join(t.Modifiers, "") + t.FullName()
-}
-
-func (t Type) FullSignature() string {
- pkg := ""
- if t.Package != "" {
- pkg = t.Package + "."
- }
-
- return strings.Join(t.Modifiers, "") + pkg + t.GoType
-}
+func (b *builder) buildTypes() (map[string]*config.TypeReference, error) {
+ ret := map[string]*config.TypeReference{}
-func (t Type) IsPtr() bool {
- return len(t.Modifiers) > 0 && t.Modifiers[0] == modPtr
-}
+ for _, ref := range b.Binder.References {
+ for ref != nil {
+ ret[ref.UniquenessKey()] = ref
-func (t *Type) StripPtr() {
- if !t.IsPtr() {
- return
+ ref = ref.Elem()
+ }
}
- t.Modifiers = t.Modifiers[0 : len(t.Modifiers)-1]
-}
-
-func (t Type) IsSlice() bool {
- return len(t.Modifiers) > 0 && t.Modifiers[0] == modList ||
- len(t.Modifiers) > 1 && t.Modifiers[0] == modPtr && t.Modifiers[1] == modList
-}
-
-func (t NamedType) IsMarshaled() bool {
- return t.Marshaler != nil
-}
-
-func (t Type) Unmarshal(result, raw string) string {
- return t.unmarshal(result, raw, t.Modifiers, 1)
-}
-
-func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) string {
- switch {
- case len(remainingMods) > 0 && remainingMods[0] == modPtr:
- ptr := "ptr" + strconv.Itoa(depth)
- return tpl(`var {{.ptr}} {{.mods}}{{.t.FullName}}
- if {{.raw}} != nil {
- {{.next}}
- {{.result}} = &{{.ptr -}}
- }
- `, map[string]interface{}{
- "ptr": ptr,
- "t": t,
- "raw": raw,
- "result": result,
- "mods": strings.Join(remainingMods[1:], ""),
- "next": t.unmarshal(ptr, raw, remainingMods[1:], depth+1),
- })
-
- case len(remainingMods) > 0 && remainingMods[0] == modList:
- var rawIf = "rawIf" + strconv.Itoa(depth)
- var index = "idx" + strconv.Itoa(depth)
-
- return tpl(`var {{.rawSlice}} []interface{}
- if {{.raw}} != nil {
- if tmp1, ok := {{.raw}}.([]interface{}); ok {
- {{.rawSlice}} = tmp1
- } else {
- {{.rawSlice}} = []interface{}{ {{.raw}} }
- }
- }
- {{.result}} = make({{.type}}, len({{.rawSlice}}))
- for {{.index}} := range {{.rawSlice}} {
- {{ .next -}}
- }`, map[string]interface{}{
- "raw": raw,
- "rawSlice": rawIf,
- "index": index,
- "result": result,
- "type": strings.Join(remainingMods, "") + t.NamedType.FullName(),
- "next": t.unmarshal(result+"["+index+"]", rawIf+"["+index+"]", remainingMods[1:], depth+1),
- })
- }
-
- realResult := result
- if t.AliasedType != nil {
- result = "castTmp"
- }
-
- return tpl(`{{- if .t.AliasedType }}
- var castTmp {{.t.FullName}}
- {{ end }}
- {{- if eq .t.GoType "map[string]interface{}" }}
- {{- .result }} = {{.raw}}.(map[string]interface{})
- {{- else if .t.Marshaler }}
- {{- .result }}, err = {{ .t.Marshaler.PkgDot }}Unmarshal{{.t.Marshaler.GoType}}({{.raw}})
- {{- else -}}
- err = (&{{.result}}).UnmarshalGQL({{.raw}})
- {{- end }}
- {{- if .t.AliasedType }}
- {{ .realResult }} = {{.t.AliasedType.FullName}}(castTmp)
- {{- end }}`, map[string]interface{}{
- "realResult": realResult,
- "result": result,
- "raw": raw,
- "t": t,
- })
-}
-
-func (t Type) Marshal(val string) string {
- if t.AliasedType != nil {
- val = t.GoType + "(" + val + ")"
- }
-
- if t.Marshaler != nil {
- return "return " + t.Marshaler.PkgDot() + "Marshal" + t.Marshaler.GoType + "(" + val + ")"
- }
-
- return "return " + val
+ return ret, nil
}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/type.gotpl b/vendor/github.com/99designs/gqlgen/codegen/type.gotpl
new file mode 100644
index 00000000..f727baac
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/codegen/type.gotpl
@@ -0,0 +1,131 @@
+{{- range $type := .ReferencedTypes }}
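+ {{- /* For each referenced type, emit an unmarshal helper (input coercion) and a marshal helper (response serialization) on the executionContext. */ -}}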
+ {{ with $type.UnmarshalFunc }}
+ func (ec *executionContext) {{ . }}(ctx context.Context, v interface{}) ({{ $type.GO | ref }}, error) {
+ {{- if $type.IsNilable }}
+ if v == nil { return nil, nil }
+ {{- end }}
+ {{- if $type.IsPtr }}
+ res, err := ec.{{ $type.Elem.UnmarshalFunc }}(ctx, v)
+ return &res, err
+ {{- else if $type.IsSlice }}
+ var vSlice []interface{}
+ if v != nil {
+ if tmp1, ok := v.([]interface{}); ok {
+ vSlice = tmp1
+ } else {
+ vSlice = []interface{}{ v }
+ }
+ }
+ var err error
+ res := make([]{{$type.GO.Elem | ref}}, len(vSlice))
+ for i := range vSlice {
+ res[i], err = ec.{{ $type.Elem.UnmarshalFunc }}(ctx, vSlice[i])
+ if err != nil {
+ return nil, err
+ }
+ }
+ return res, nil
+ {{- else }}
+ {{- if $type.Unmarshaler }}
+ {{- if $type.CastType }}
+ tmp, err := {{ $type.Unmarshaler | call }}(v)
+ return {{ $type.GO | ref }}(tmp), err
+ {{- else}}
+ return {{ $type.Unmarshaler | call }}(v)
+ {{- end }}
+ {{- else if eq ($type.GO | ref) "map[string]interface{}" }}
+ return v.(map[string]interface{}), nil
+ {{- else if $type.IsMarshaler -}}
+ var res {{ $type.GO | ref }}
+ return res, res.UnmarshalGQL(v)
+ {{- else }}
+ return ec.unmarshalInput{{ $type.GQL.Name }}(ctx, v)
+ {{- end }}
+ {{- end }}
+ }
+ {{- end }}
+
+ {{ with $type.MarshalFunc }}
+ func (ec *executionContext) {{ . }}(ctx context.Context, sel ast.SelectionSet, v {{ $type.GO | ref }}) graphql.Marshaler {
+ {{- if $type.IsNilable }}
+ if v == nil {
+ {{- if $type.GQL.NonNull }}
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ {{- end }}
+ return graphql.Null
+ }
+ {{- else if $type.HasIsZero }}
+ if v.IsZero() {
+ {{- if $type.GQL.NonNull }}
+ if !ec.HasError(graphql.GetResolverContext(ctx)) {
+ ec.Errorf(ctx, "must not be null")
+ }
+ {{- end }}
+ return graphql.Null
+ }
+ {{- end }}
+
+ {{- if $type.IsSlice }}
+ {{- if not $type.GQL.NonNull }}
+ if v == nil {
+ return graphql.Null
+ }
+ {{- end }}
+ ret := make(graphql.Array, len(v))
+ {{- if not $type.IsScalar }}
+ var wg sync.WaitGroup
+ isLen1 := len(v) == 1
+ if !isLen1 {
+ wg.Add(len(v))
+ }
+ {{- end }}
+ for i := range v {
+ {{- if not $type.IsScalar }}
+ i := i
+ rctx := &graphql.ResolverContext{
+ Index: &i,
+ Result: &v[i],
+ }
+ ctx := graphql.WithResolverContext(ctx, rctx)
+ f := func(i int) {
+ defer func() {
+ if r := recover(); r != nil {
+ ec.Error(ctx, ec.Recover(ctx, r))
+ ret = nil
+ }
+ }()
+ if !isLen1 {
+ defer wg.Done()
+ }
+ ret[i] = ec.{{ $type.Elem.MarshalFunc }}(ctx, sel, v[i])
+ }
+ if isLen1 {
+ f(i)
+ } else {
+ go f(i)
+ }
+ {{ else }}
+ ret[i] = ec.{{ $type.Elem.MarshalFunc }}(ctx, sel, v[i])
+ {{- end}}
+ }
+ {{ if not $type.IsScalar }} wg.Wait() {{ end }}
+ return ret
+ {{- else }}
+
+ {{- if $type.IsMarshaler }}
+ return v
+ {{- else if $type.Marshaler }}
+ {{- if $type.IsPtr }}
+ return ec.{{ $type.Elem.MarshalFunc }}(ctx, sel, *v)
+ {{- else }}
+ return {{ $type.Marshaler | call }}({{- if $type.CastType }}{{ $type.CastType | ref }}(v){{else}}v{{- end }})
+ {{- end }}
+ {{- else }}
+ return ec._{{$type.Definition.Name}}(ctx, sel, {{ if not $type.IsNilable}}&{{end}} v)
+ {{- end }}
+ {{- end }}
+ }
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/type_build.go b/vendor/github.com/99designs/gqlgen/codegen/type_build.go
deleted file mode 100644
index 586b0db2..00000000
--- a/vendor/github.com/99designs/gqlgen/codegen/type_build.go
+++ /dev/null
@@ -1,100 +0,0 @@
-package codegen
-
-import (
- "go/types"
- "strings"
-
- "github.com/vektah/gqlparser/ast"
- "golang.org/x/tools/go/loader"
-)
-
-// namedTypeFromSchema objects for every graphql type, including scalars. There should only be one instance of Type for each thing
-func (cfg *Config) buildNamedTypes() NamedTypes {
- types := map[string]*NamedType{}
- for _, schemaType := range cfg.schema.Types {
- t := namedTypeFromSchema(schemaType)
-
- if userEntry, ok := cfg.Models[t.GQLType]; ok && userEntry.Model != "" {
- t.IsUserDefined = true
- t.Package, t.GoType = pkgAndType(userEntry.Model)
- } else if t.IsScalar {
- t.Package = "github.com/99designs/gqlgen/graphql"
- t.GoType = "String"
- }
-
- types[t.GQLType] = t
- }
- return types
-}
-
-func (cfg *Config) bindTypes(namedTypes NamedTypes, destDir string, prog *loader.Program) {
- for _, t := range namedTypes {
- if t.Package == "" {
- continue
- }
-
- def, _ := findGoType(prog, t.Package, "Marshal"+t.GoType)
- switch def := def.(type) {
- case *types.Func:
- sig := def.Type().(*types.Signature)
- cpy := t.Ref
- t.Marshaler = &cpy
-
- t.Package, t.GoType = pkgAndType(sig.Params().At(0).Type().String())
- }
- }
-}
-
-// namedTypeFromSchema objects for every graphql type, including primitives.
-// don't recurse into object fields or interfaces yet, lets make sure we have collected everything first.
-func namedTypeFromSchema(schemaType *ast.Definition) *NamedType {
- switch schemaType.Kind {
- case ast.Scalar, ast.Enum:
- return &NamedType{GQLType: schemaType.Name, IsScalar: true}
- case ast.Interface, ast.Union:
- return &NamedType{GQLType: schemaType.Name, IsInterface: true}
- case ast.InputObject:
- return &NamedType{GQLType: schemaType.Name, IsInput: true}
- default:
- return &NamedType{GQLType: schemaType.Name}
- }
-}
-
-// take a string in the form github.com/package/blah.Type and split it into package and type
-func pkgAndType(name string) (string, string) {
- parts := strings.Split(name, ".")
- if len(parts) == 1 {
- return "", name
- }
-
- return normalizeVendor(strings.Join(parts[:len(parts)-1], ".")), parts[len(parts)-1]
-}
-
-func (n NamedTypes) getType(t *ast.Type) *Type {
- orig := t
- var modifiers []string
- for {
- if t.Elem != nil {
- modifiers = append(modifiers, modList)
- t = t.Elem
- } else {
- if !t.NonNull {
- modifiers = append(modifiers, modPtr)
- }
- if n[t.NamedType] == nil {
- panic("missing type " + t.NamedType)
- }
- res := &Type{
- NamedType: n[t.NamedType],
- Modifiers: modifiers,
- ASTType: orig,
- }
-
- if res.IsInterface {
- res.StripPtr()
- }
-
- return res
- }
- }
-}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/util.go b/vendor/github.com/99designs/gqlgen/codegen/util.go
index cc6246fd..59dfde08 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/util.go
+++ b/vendor/github.com/99designs/gqlgen/codegen/util.go
@@ -1,65 +1,30 @@
package codegen
import (
- "fmt"
"go/types"
- "reflect"
- "regexp"
"strings"
"github.com/pkg/errors"
- "golang.org/x/tools/go/loader"
)
-func findGoType(prog *loader.Program, pkgName string, typeName string) (types.Object, error) {
- if pkgName == "" {
- return nil, nil
- }
- fullName := typeName
- if pkgName != "" {
- fullName = pkgName + "." + typeName
- }
-
- pkgName, err := resolvePkg(pkgName)
- if err != nil {
- return nil, errors.Errorf("unable to resolve package for %s: %s\n", fullName, err.Error())
- }
-
- pkg := prog.Imported[pkgName]
- if pkg == nil {
- return nil, errors.Errorf("required package was not loaded: %s", fullName)
- }
-
- for astNode, def := range pkg.Defs {
- if astNode.Name != typeName || def.Parent() == nil || def.Parent() != pkg.Pkg.Scope() {
- continue
- }
-
- return def, nil
- }
-
- return nil, errors.Errorf("unable to find type %s\n", fullName)
-}
-
-func findGoNamedType(prog *loader.Program, pkgName string, typeName string) (*types.Named, error) {
- def, err := findGoType(prog, pkgName, typeName)
- if err != nil {
- return nil, err
- }
+func findGoNamedType(def types.Type) (*types.Named, error) {
if def == nil {
return nil, nil
}
- namedType, ok := def.Type().(*types.Named)
+ namedType, ok := def.(*types.Named)
if !ok {
- return nil, errors.Errorf("expected %s to be a named type, instead found %T\n", typeName, def.Type())
+ return nil, errors.Errorf("expected %s to be a named type, instead found %T\n", def.String(), def)
}
return namedType, nil
}
-func findGoInterface(prog *loader.Program, pkgName string, typeName string) (*types.Interface, error) {
- namedType, err := findGoNamedType(prog, pkgName, typeName)
+func findGoInterface(def types.Type) (*types.Interface, error) {
+ if def == nil {
+ return nil, nil
+ }
+ namedType, err := findGoNamedType(def)
if err != nil {
return nil, err
}
@@ -69,319 +34,14 @@ func findGoInterface(prog *loader.Program, pkgName string, typeName string) (*ty
underlying, ok := namedType.Underlying().(*types.Interface)
if !ok {
- return nil, errors.Errorf("expected %s to be a named interface, instead found %s", typeName, namedType.String())
+ return nil, errors.Errorf("expected %s to be a named interface, instead found %s", def.String(), namedType.String())
}
return underlying, nil
}
-func findMethod(typ *types.Named, name string) *types.Func {
- for i := 0; i < typ.NumMethods(); i++ {
- method := typ.Method(i)
- if !method.Exported() {
- continue
- }
-
- if strings.EqualFold(method.Name(), name) {
- return method
- }
- }
-
- if s, ok := typ.Underlying().(*types.Struct); ok {
- for i := 0; i < s.NumFields(); i++ {
- field := s.Field(i)
- if !field.Anonymous() {
- continue
- }
-
- if named, ok := field.Type().(*types.Named); ok {
- if f := findMethod(named, name); f != nil {
- return f
- }
- }
- }
- }
-
- return nil
-}
-
func equalFieldName(source, target string) bool {
source = strings.Replace(source, "_", "", -1)
target = strings.Replace(target, "_", "", -1)
return strings.EqualFold(source, target)
}
-
-// findField attempts to match the name to a struct field with the following
-// priorites:
-// 1. If struct tag is passed then struct tag has highest priority
-// 2. Field in an embedded struct
-// 3. Actual Field name
-func findField(typ *types.Struct, name, structTag string) (*types.Var, error) {
- var foundField *types.Var
- foundFieldWasTag := false
-
- for i := 0; i < typ.NumFields(); i++ {
- field := typ.Field(i)
-
- if structTag != "" {
- tags := reflect.StructTag(typ.Tag(i))
- if val, ok := tags.Lookup(structTag); ok {
- if equalFieldName(val, name) {
- if foundField != nil && foundFieldWasTag {
- return nil, errors.Errorf("tag %s is ambigious; multiple fields have the same tag value of %s", structTag, val)
- }
-
- foundField = field
- foundFieldWasTag = true
- }
- }
- }
-
- if field.Anonymous() {
-
- fieldType := field.Type()
-
- if ptr, ok := fieldType.(*types.Pointer); ok {
- fieldType = ptr.Elem()
- }
-
- // Type.Underlying() returns itself for all types except types.Named, where it returns a struct type.
- // It should be safe to always call.
- if named, ok := fieldType.Underlying().(*types.Struct); ok {
- f, err := findField(named, name, structTag)
- if err != nil && !strings.HasPrefix(err.Error(), "no field named") {
- return nil, err
- }
- if f != nil && foundField == nil {
- foundField = f
- }
- }
- }
-
- if !field.Exported() {
- continue
- }
-
- if equalFieldName(field.Name(), name) && foundField == nil { // aqui!
- foundField = field
- }
- }
-
- if foundField == nil {
- return nil, fmt.Errorf("no field named %s", name)
- }
-
- return foundField, nil
-}
-
-type BindError struct {
- object *Object
- field *Field
- typ types.Type
- methodErr error
- varErr error
-}
-
-func (b BindError) Error() string {
- return fmt.Sprintf(
- "Unable to bind %s.%s to %s\n %s\n %s",
- b.object.GQLType,
- b.field.GQLName,
- b.typ.String(),
- b.methodErr.Error(),
- b.varErr.Error(),
- )
-}
-
-type BindErrors []BindError
-
-func (b BindErrors) Error() string {
- var errs []string
- for _, err := range b {
- errs = append(errs, err.Error())
- }
- return strings.Join(errs, "\n\n")
-}
-
-func bindObject(t types.Type, object *Object, structTag string) BindErrors {
- var errs BindErrors
- for i := range object.Fields {
- field := &object.Fields[i]
-
- if field.ForceResolver {
- continue
- }
-
- // first try binding to a method
- methodErr := bindMethod(t, field)
- if methodErr == nil {
- continue
- }
-
- // otherwise try binding to a var
- varErr := bindVar(t, field, structTag)
-
- if varErr != nil {
- errs = append(errs, BindError{
- object: object,
- typ: t,
- field: field,
- varErr: varErr,
- methodErr: methodErr,
- })
- }
- }
- return errs
-}
-
-func bindMethod(t types.Type, field *Field) error {
- namedType, ok := t.(*types.Named)
- if !ok {
- return fmt.Errorf("not a named type")
- }
-
- goName := field.GQLName
- if field.GoFieldName != "" {
- goName = field.GoFieldName
- }
- method := findMethod(namedType, goName)
- if method == nil {
- return fmt.Errorf("no method named %s", field.GQLName)
- }
- sig := method.Type().(*types.Signature)
-
- if sig.Results().Len() == 1 {
- field.NoErr = true
- } else if sig.Results().Len() != 2 {
- return fmt.Errorf("method has wrong number of args")
- }
- params := sig.Params()
- // If the first argument is the context, remove it from the comparison and set
- // the MethodHasContext flag so that the context will be passed to this model's method
- if params.Len() > 0 && params.At(0).Type().String() == "context.Context" {
- field.MethodHasContext = true
- vars := make([]*types.Var, params.Len()-1)
- for i := 1; i < params.Len(); i++ {
- vars[i-1] = params.At(i)
- }
- params = types.NewTuple(vars...)
- }
-
- newArgs, err := matchArgs(field, params)
- if err != nil {
- return err
- }
-
- result := sig.Results().At(0)
- if err := validateTypeBinding(field, result.Type()); err != nil {
- return errors.Wrap(err, "method has wrong return type")
- }
-
- // success, args and return type match. Bind to method
- field.GoFieldType = GoFieldMethod
- field.GoReceiverName = "obj"
- field.GoFieldName = method.Name()
- field.Args = newArgs
- return nil
-}
-
-func bindVar(t types.Type, field *Field, structTag string) error {
- underlying, ok := t.Underlying().(*types.Struct)
- if !ok {
- return fmt.Errorf("not a struct")
- }
-
- goName := field.GQLName
- if field.GoFieldName != "" {
- goName = field.GoFieldName
- }
- structField, err := findField(underlying, goName, structTag)
- if err != nil {
- return err
- }
-
- if err := validateTypeBinding(field, structField.Type()); err != nil {
- return errors.Wrap(err, "field has wrong type")
- }
-
- // success, bind to var
- field.GoFieldType = GoFieldVariable
- field.GoReceiverName = "obj"
- field.GoFieldName = structField.Name()
- return nil
-}
-
-func matchArgs(field *Field, params *types.Tuple) ([]FieldArgument, error) {
- var newArgs []FieldArgument
-
-nextArg:
- for j := 0; j < params.Len(); j++ {
- param := params.At(j)
- for _, oldArg := range field.Args {
- if strings.EqualFold(oldArg.GQLName, param.Name()) {
- if !field.ForceResolver {
- oldArg.Type.Modifiers = modifiersFromGoType(param.Type())
- }
- newArgs = append(newArgs, oldArg)
- continue nextArg
- }
- }
-
- // no matching arg found, abort
- return nil, fmt.Errorf("arg %s not found on method", param.Name())
- }
- return newArgs, nil
-}
-
-func validateTypeBinding(field *Field, goType types.Type) error {
- gqlType := normalizeVendor(field.Type.FullSignature())
- goTypeStr := normalizeVendor(goType.String())
-
- if equalTypes(goTypeStr, gqlType) {
- field.Type.Modifiers = modifiersFromGoType(goType)
- return nil
- }
-
- // deal with type aliases
- underlyingStr := normalizeVendor(goType.Underlying().String())
- if equalTypes(underlyingStr, gqlType) {
- field.Type.Modifiers = modifiersFromGoType(goType)
- pkg, typ := pkgAndType(goType.String())
- field.AliasedType = &Ref{GoType: typ, Package: pkg}
- return nil
- }
-
- return fmt.Errorf("%s is not compatible with %s", gqlType, goTypeStr)
-}
-
-func modifiersFromGoType(t types.Type) []string {
- var modifiers []string
- for {
- switch val := t.(type) {
- case *types.Pointer:
- modifiers = append(modifiers, modPtr)
- t = val.Elem()
- case *types.Array:
- modifiers = append(modifiers, modList)
- t = val.Elem()
- case *types.Slice:
- modifiers = append(modifiers, modList)
- t = val.Elem()
- default:
- return modifiers
- }
- }
-}
-
-var modsRegex = regexp.MustCompile(`^(\*|\[\])*`)
-
-func normalizeVendor(pkg string) string {
- modifiers := modsRegex.FindAllString(pkg, 1)[0]
- pkg = strings.TrimPrefix(pkg, modifiers)
- parts := strings.Split(pkg, "/vendor/")
- return modifiers + parts[len(parts)-1]
-}
-
-func equalTypes(goType string, gqlType string) bool {
- return goType == gqlType || "*"+goType == gqlType || goType == "*"+gqlType || strings.Replace(goType, "[]*", "[]", -1) == gqlType
-}
diff --git a/vendor/github.com/99designs/gqlgen/docs/content/_introduction.md b/vendor/github.com/99designs/gqlgen/docs/content/_introduction.md
new file mode 120000
index 00000000..fe840054
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/docs/content/_introduction.md
@@ -0,0 +1 @@
+../../README.md
\ No newline at end of file
diff --git a/vendor/github.com/99designs/gqlgen/graphql/bool.go b/vendor/github.com/99designs/gqlgen/graphql/bool.go
index 7053bbca..b175ca98 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/bool.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/bool.go
@@ -19,7 +19,7 @@ func MarshalBoolean(b bool) Marshaler {
func UnmarshalBoolean(v interface{}) (bool, error) {
switch v := v.(type) {
case string:
- return "true" == strings.ToLower(v), nil
+ return strings.ToLower(v) == "true", nil
case int:
return v != 0, nil
case bool:
diff --git a/vendor/github.com/99designs/gqlgen/graphql/context.go b/vendor/github.com/99designs/gqlgen/graphql/context.go
index f83fa36f..58d3c741 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/context.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/context.go
@@ -12,6 +12,7 @@ import (
type Resolver func(ctx context.Context) (res interface{}, err error)
type FieldMiddleware func(ctx context.Context, next Resolver) (res interface{}, err error)
type RequestMiddleware func(ctx context.Context, next func(ctx context.Context) []byte) []byte
+type ComplexityLimitFunc func(ctx context.Context) int
type RequestContext struct {
RawQuery string
@@ -71,12 +72,10 @@ const (
)
func GetRequestContext(ctx context.Context) *RequestContext {
- val := ctx.Value(request)
- if val == nil {
- return nil
+ if val, ok := ctx.Value(request).(*RequestContext); ok {
+ return val
}
-
- return val.(*RequestContext)
+ return nil
}
func WithRequestContext(ctx context.Context, rc *RequestContext) context.Context {
@@ -95,6 +94,8 @@ type ResolverContext struct {
Index *int
// The result object of resolver
Result interface{}
+ // IsMethod indicates if the resolver is a method
+ IsMethod bool
}
func (r *ResolverContext) Path() []interface{} {
@@ -117,8 +118,10 @@ func (r *ResolverContext) Path() []interface{} {
}
func GetResolverContext(ctx context.Context) *ResolverContext {
- val, _ := ctx.Value(resolver).(*ResolverContext)
- return val
+ if val, ok := ctx.Value(resolver).(*ResolverContext); ok {
+ return val
+ }
+ return nil
}
func WithResolverContext(ctx context.Context, rc *ResolverContext) context.Context {
@@ -132,6 +135,24 @@ func CollectFieldsCtx(ctx context.Context, satisfies []string) []CollectedField
return CollectFields(ctx, resctx.Field.Selections, satisfies)
}
+// CollectAllFields returns a slice of all GraphQL field names that were selected for the current resolver context.
+// The slice will contain the unique set of all field names requested regardless of fragment type conditions.
+func CollectAllFields(ctx context.Context) []string {
+ resctx := GetResolverContext(ctx)
+ collected := CollectFields(ctx, resctx.Field.Selections, nil)
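+ // Deduplicate collected field names with a simple nested scan; selection sets are typically small.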
+ uniq := make([]string, 0, len(collected))
+Next:
+ for _, f := range collected {
+ for _, name := range uniq {
+ if name == f.Name {
+ continue Next
+ }
+ }
+ uniq = append(uniq, f.Name)
+ }
+ return uniq
+}
+
// Errorf sends an error string to the client, passing it through the formatter.
func (c *RequestContext) Errorf(ctx context.Context, format string, args ...interface{}) {
c.errorsMu.Lock()
@@ -217,3 +238,37 @@ func (c *RequestContext) RegisterExtension(key string, value interface{}) error
c.Extensions[key] = value
return nil
}
+
+// ChainFieldMiddleware composes multiple FieldMiddleware functions into a single middleware chain.
+func ChainFieldMiddleware(handleFunc ...FieldMiddleware) FieldMiddleware {
+ n := len(handleFunc)
+
+ if n > 1 {
+ lastI := n - 1
+ return func(ctx context.Context, next Resolver) (interface{}, error) {
+ var (
+ chainHandler Resolver
+ curI int
+ )
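+ // chainHandler advances curI through handleFunc on each call, passing itself as the next resolver until the last middleware, which invokes the real next resolver.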
+ chainHandler = func(currentCtx context.Context) (interface{}, error) {
+ if curI == lastI {
+ return next(currentCtx)
+ }
+ curI++
+ res, err := handleFunc[curI](currentCtx, chainHandler)
+ curI--
+ return res, err
+
+ }
+ return handleFunc[0](ctx, chainHandler)
+ }
+ }
+
+ if n == 1 {
+ return handleFunc[0]
+ }
+
+ return func(ctx context.Context, next Resolver) (interface{}, error) {
+ return next(ctx)
+ }
+}
diff --git a/vendor/github.com/99designs/gqlgen/graphql/error.go b/vendor/github.com/99designs/gqlgen/graphql/error.go
index 7f161a43..af8b4ce4 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/error.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/error.go
@@ -14,7 +14,9 @@ type ExtendedError interface {
func DefaultErrorPresenter(ctx context.Context, err error) *gqlerror.Error {
if gqlerr, ok := err.(*gqlerror.Error); ok {
- gqlerr.Path = GetResolverContext(ctx).Path()
+ if gqlerr.Path == nil {
+ gqlerr.Path = GetResolverContext(ctx).Path()
+ }
return gqlerr
}
diff --git a/vendor/github.com/99designs/gqlgen/graphql/exec.go b/vendor/github.com/99designs/gqlgen/graphql/exec.go
index 9beb3149..17c57bf6 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/exec.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/exec.go
@@ -16,6 +16,9 @@ type ExecutableSchema interface {
Subscription(ctx context.Context, op *ast.OperationDefinition) func() *Response
}
+// CollectFields returns the set of fields from an ast.SelectionSet where all collected fields satisfy at least one of the GraphQL types
+// passed through satisfies. Providing an empty or nil slice for satisfies will return collect all fields regardless of fragment
+// type conditions.
func CollectFields(ctx context.Context, selSet ast.SelectionSet, satisfies []string) []CollectedField {
return collectFields(GetRequestContext(ctx), selSet, satisfies, map[string]bool{})
}
@@ -35,7 +38,10 @@ func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []
f.Selections = append(f.Selections, sel.SelectionSet...)
case *ast.InlineFragment:
- if !shouldIncludeNode(sel.Directives, reqCtx.Variables) || !instanceOf(sel.TypeCondition, satisfies) {
+ if !shouldIncludeNode(sel.Directives, reqCtx.Variables) {
+ continue
+ }
+ if len(satisfies) > 0 && !instanceOf(sel.TypeCondition, satisfies) {
continue
}
for _, childField := range collectFields(reqCtx, sel.SelectionSet, satisfies, visited) {
@@ -59,7 +65,7 @@ func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []
panic(fmt.Errorf("missing fragment %s", fragmentName))
}
- if !instanceOf(fragment.TypeCondition, satisfies) {
+ if len(satisfies) > 0 && !instanceOf(fragment.TypeCondition, satisfies) {
continue
}
diff --git a/vendor/github.com/99designs/gqlgen/graphql/fieldset.go b/vendor/github.com/99designs/gqlgen/graphql/fieldset.go
new file mode 100644
index 00000000..351e266f
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/graphql/fieldset.go
@@ -0,0 +1,63 @@
+package graphql
+
+import (
+ "io"
+ "sync"
+)
+
+type FieldSet struct {
+ fields []CollectedField
+ Values []Marshaler
+ delayed []delayedResult
+}
+
+type delayedResult struct {
+ i int
+ f func() Marshaler
+}
+
+func NewFieldSet(fields []CollectedField) *FieldSet {
+ return &FieldSet{
+ fields: fields,
+ Values: make([]Marshaler, len(fields)),
+ }
+}
+
+func (m *FieldSet) Concurrently(i int, f func() Marshaler) {
+ m.delayed = append(m.delayed, delayedResult{i: i, f: f})
+}
+
+func (m *FieldSet) Dispatch() {
+ if len(m.delayed) == 1 {
+ // only one concurrent task, no need to spawn a goroutine or create a waitgroup
+ d := m.delayed[0]
+ m.Values[d.i] = d.f()
+ } else if len(m.delayed) > 1 {
+ // more than one concurrent task, use the main goroutine to do one, only spawn goroutines for the others
+
+ var wg sync.WaitGroup
+ for _, d := range m.delayed[1:] {
+ wg.Add(1)
+ go func(d delayedResult) {
+ m.Values[d.i] = d.f()
+ wg.Done()
+ }(d)
+ }
+
+ m.Values[m.delayed[0].i] = m.delayed[0].f()
+ wg.Wait()
+ }
+}
+
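+// MarshalGQL writes the field set as a JSON object, emitting each collected field under its alias in selection order.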
+func (m *FieldSet) MarshalGQL(writer io.Writer) {
+ writer.Write(openBrace)
+ for i, field := range m.fields {
+ if i != 0 {
+ writer.Write(comma)
+ }
+ writeQuotedString(writer, field.Alias)
+ writer.Write(colon)
+ m.Values[i].MarshalGQL(writer)
+ }
+ writer.Write(closeBrace)
+}
diff --git a/vendor/github.com/99designs/gqlgen/graphql/id.go b/vendor/github.com/99designs/gqlgen/graphql/id.go
index a5a7960f..4f532037 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/id.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/id.go
@@ -34,3 +34,24 @@ func UnmarshalID(v interface{}) (string, error) {
return "", fmt.Errorf("%T is not a string", v)
}
}
+
+func MarshalIntID(i int) Marshaler {
+ return WriterFunc(func(w io.Writer) {
+ writeQuotedString(w, strconv.Itoa(i))
+ })
+}
+
+func UnmarshalIntID(v interface{}) (int, error) {
+ switch v := v.(type) {
+ case string:
+ return strconv.Atoi(v)
+ case int:
+ return v, nil
+ case int64:
+ return int(v), nil
+ case json.Number:
+ return strconv.Atoi(string(v))
+ default:
+ return 0, fmt.Errorf("%T is not an int", v)
+ }
+}
diff --git a/vendor/github.com/99designs/gqlgen/graphql/int.go b/vendor/github.com/99designs/gqlgen/graphql/int.go
index ff87574c..57d0d589 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/int.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/int.go
@@ -27,3 +27,53 @@ func UnmarshalInt(v interface{}) (int, error) {
return 0, fmt.Errorf("%T is not an int", v)
}
}
+
+func MarshalInt64(i int64) Marshaler {
+ return WriterFunc(func(w io.Writer) {
+ io.WriteString(w, strconv.FormatInt(i, 10))
+ })
+}
+
+func UnmarshalInt64(v interface{}) (int64, error) {
+ switch v := v.(type) {
+ case string:
+ return strconv.ParseInt(v, 10, 64)
+ case int:
+ return int64(v), nil
+ case int64:
+ return v, nil
+ case json.Number:
+ return strconv.ParseInt(string(v), 10, 64)
+ default:
+ return 0, fmt.Errorf("%T is not an int", v)
+ }
+}
+
+func MarshalInt32(i int32) Marshaler {
+ return WriterFunc(func(w io.Writer) {
+ io.WriteString(w, strconv.FormatInt(int64(i), 10))
+ })
+}
+
+func UnmarshalInt32(v interface{}) (int32, error) {
+ switch v := v.(type) {
+ case string:
+ iv, err := strconv.ParseInt(v, 10, 32)
+ if err != nil {
+ return 0, err
+ }
+ return int32(iv), nil
+ case int:
+ return int32(v), nil
+ case int64:
+ return int32(v), nil
+ case json.Number:
+ iv, err := strconv.ParseInt(string(v), 10, 32)
+ if err != nil {
+ return 0, err
+ }
+ return int32(iv), nil
+ default:
+ return 0, fmt.Errorf("%T is not an int", v)
+ }
+}
diff --git a/vendor/github.com/99designs/gqlgen/graphql/introspection/type.go b/vendor/github.com/99designs/gqlgen/graphql/introspection/type.go
index b963aa0e..f1228edf 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/introspection/type.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/introspection/type.go
@@ -62,9 +62,9 @@ func (t *Type) Description() string {
func (t *Type) Fields(includeDeprecated bool) []Field {
if t.def == nil || (t.def.Kind != ast.Object && t.def.Kind != ast.Interface) {
- return nil
+ return []Field{}
}
- var fields []Field
+ fields := []Field{}
for _, f := range t.def.Fields {
if strings.HasPrefix(f.Name, "__") {
continue
@@ -93,10 +93,10 @@ func (t *Type) Fields(includeDeprecated bool) []Field {
func (t *Type) InputFields() []InputValue {
if t.def == nil || t.def.Kind != ast.InputObject {
- return nil
+ return []InputValue{}
}
- var res []InputValue
+ res := []InputValue{}
for _, f := range t.def.Fields {
res = append(res, InputValue{
Name: f.Name,
@@ -118,10 +118,10 @@ func defaultValue(value *ast.Value) *string {
func (t *Type) Interfaces() []Type {
if t.def == nil || t.def.Kind != ast.Object {
- return nil
+ return []Type{}
}
- var res []Type
+ res := []Type{}
for _, intf := range t.def.Interfaces {
res = append(res, *WrapTypeFromDef(t.schema, t.schema.Types[intf]))
}
@@ -131,10 +131,10 @@ func (t *Type) Interfaces() []Type {
func (t *Type) PossibleTypes() []Type {
if t.def == nil || (t.def.Kind != ast.Interface && t.def.Kind != ast.Union) {
- return nil
+ return []Type{}
}
- var res []Type
+ res := []Type{}
for _, pt := range t.schema.GetPossibleTypes(t.def) {
res = append(res, *WrapTypeFromDef(t.schema, pt))
}
@@ -143,10 +143,10 @@ func (t *Type) PossibleTypes() []Type {
func (t *Type) EnumValues(includeDeprecated bool) []EnumValue {
if t.def == nil || t.def.Kind != ast.Enum {
- return nil
+ return []EnumValue{}
}
- var res []EnumValue
+ res := []EnumValue{}
for _, val := range t.def.EnumValues {
res = append(res, EnumValue{
Name: val.Name,
diff --git a/vendor/github.com/99designs/gqlgen/graphql/jsonw.go b/vendor/github.com/99designs/gqlgen/graphql/jsonw.go
index c112444a..db95d8e4 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/jsonw.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/jsonw.go
@@ -2,7 +2,6 @@ package graphql
import (
"io"
- "strconv"
)
var nullLit = []byte(`null`)
@@ -27,42 +26,12 @@ type Unmarshaler interface {
UnmarshalGQL(v interface{}) error
}
-type OrderedMap struct {
- Keys []string
- Values []Marshaler
-}
-
type WriterFunc func(writer io.Writer)
func (f WriterFunc) MarshalGQL(w io.Writer) {
f(w)
}
-func NewOrderedMap(len int) *OrderedMap {
- return &OrderedMap{
- Keys: make([]string, len),
- Values: make([]Marshaler, len),
- }
-}
-
-func (m *OrderedMap) Add(key string, value Marshaler) {
- m.Keys = append(m.Keys, key)
- m.Values = append(m.Values, value)
-}
-
-func (m *OrderedMap) MarshalGQL(writer io.Writer) {
- writer.Write(openBrace)
- for i, key := range m.Keys {
- if i != 0 {
- writer.Write(comma)
- }
- io.WriteString(writer, strconv.Quote(key))
- writer.Write(colon)
- m.Values[i].MarshalGQL(writer)
- }
- writer.Write(closeBrace)
-}
-
type Array []Marshaler
func (a Array) MarshalGQL(writer io.Writer) {
diff --git a/vendor/github.com/99designs/gqlgen/graphql/root.go b/vendor/github.com/99designs/gqlgen/graphql/root.go
new file mode 100644
index 00000000..3405d180
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/graphql/root.go
@@ -0,0 +1,7 @@
+package graphql
+
+type Query struct{}
+
+type Mutation struct{}
+
+type Subscription struct{}
diff --git a/vendor/github.com/99designs/gqlgen/graphql/string.go b/vendor/github.com/99designs/gqlgen/graphql/string.go
index d5fb3294..7c1b7d95 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/string.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/string.go
@@ -10,37 +10,42 @@ const encodeHex = "0123456789ABCDEF"
func MarshalString(s string) Marshaler {
return WriterFunc(func(w io.Writer) {
- start := 0
- io.WriteString(w, `"`)
-
- for i, c := range s {
- if c < 0x20 || c == '\\' || c == '"' {
- io.WriteString(w, s[start:i])
-
- switch c {
- case '\t':
- io.WriteString(w, `\t`)
- case '\r':
- io.WriteString(w, `\r`)
- case '\n':
- io.WriteString(w, `\n`)
- case '\\':
- io.WriteString(w, `\\`)
- case '"':
- io.WriteString(w, `\"`)
- default:
- io.WriteString(w, `\u00`)
- w.Write([]byte{encodeHex[c>>4], encodeHex[c&0xf]})
- }
-
- start = i + 1
+ writeQuotedString(w, s)
+ })
+}
+
+func writeQuotedString(w io.Writer, s string) {
+ start := 0
+ io.WriteString(w, `"`)
+
+ for i, c := range s {
+ if c < 0x20 || c == '\\' || c == '"' {
+ io.WriteString(w, s[start:i])
+
+ switch c {
+ case '\t':
+ io.WriteString(w, `\t`)
+ case '\r':
+ io.WriteString(w, `\r`)
+ case '\n':
+ io.WriteString(w, `\n`)
+ case '\\':
+ io.WriteString(w, `\\`)
+ case '"':
+ io.WriteString(w, `\"`)
+ default:
+ io.WriteString(w, `\u00`)
+ w.Write([]byte{encodeHex[c>>4], encodeHex[c&0xf]})
}
+
+ start = i + 1
}
+ }
- io.WriteString(w, s[start:])
- io.WriteString(w, `"`)
- })
+ io.WriteString(w, s[start:])
+ io.WriteString(w, `"`)
}
+
func UnmarshalString(v interface{}) (string, error) {
switch v := v.(type) {
case string:
diff --git a/vendor/github.com/99designs/gqlgen/graphql/version.go b/vendor/github.com/99designs/gqlgen/graphql/version.go
index 490ff3ff..88014abf 100644
--- a/vendor/github.com/99designs/gqlgen/graphql/version.go
+++ b/vendor/github.com/99designs/gqlgen/graphql/version.go
@@ -1,3 +1,3 @@
package graphql
-const Version = "v0.7.2"
+const Version = "v0.8.3"
diff --git a/vendor/github.com/99designs/gqlgen/handler/graphql.go b/vendor/github.com/99designs/gqlgen/handler/graphql.go
index 7c5f70cf..92a0471c 100644
--- a/vendor/github.com/99designs/gqlgen/handler/graphql.go
+++ b/vendor/github.com/99designs/gqlgen/handler/graphql.go
@@ -34,6 +34,7 @@ type Config struct {
requestHook graphql.RequestMiddleware
tracer graphql.Tracer
complexityLimit int
+ complexityLimitFunc graphql.ComplexityLimitFunc
disableIntrospection bool
connectionKeepAlivePingInterval time.Duration
}
@@ -60,11 +61,9 @@ func (c *Config) newRequestContext(es graphql.ExecutableSchema, doc *ast.QueryDo
if hook := c.tracer; hook != nil {
reqCtx.Tracer = hook
- } else {
- reqCtx.Tracer = &graphql.NopTracer{}
}
- if c.complexityLimit > 0 {
+ if c.complexityLimit > 0 || c.complexityLimitFunc != nil {
reqCtx.ComplexityLimit = c.complexityLimit
operationComplexity := complexity.Calculate(es, op, variables)
reqCtx.OperationComplexity = operationComplexity
@@ -112,6 +111,15 @@ func ComplexityLimit(limit int) Option {
}
}
+// ComplexityLimitFunc allows you to define a function to dynamically set the maximum query complexity that is allowed
+// to be executed.
+// If a query is submitted that exceeds the limit, a 422 status code will be returned.
+func ComplexityLimitFunc(complexityLimitFunc graphql.ComplexityLimitFunc) Option {
+ return func(cfg *Config) {
+ cfg.complexityLimitFunc = complexityLimitFunc
+ }
+}
+
// ResolverMiddleware allows you to define a function that will be called around every resolver,
// useful for logging.
func ResolverMiddleware(middleware graphql.FieldMiddleware) Option {
@@ -243,19 +251,23 @@ func CacheSize(size int) Option {
}
}
-const DefaultCacheSize = 1000
-
-// WebsocketKeepAliveDuration allows you to reconfigure the keepAlive behavior.
-// By default, keep-alive is disabled.
+// WebsocketKeepAliveDuration allows you to reconfigure the keepalive behavior.
+// By default, keepalive is enabled with a DefaultConnectionKeepAlivePingInterval
+// duration. Set handler.connectionKeepAlivePingInterval = 0 to disable keepalive
+// altogether.
func WebsocketKeepAliveDuration(duration time.Duration) Option {
return func(cfg *Config) {
cfg.connectionKeepAlivePingInterval = duration
}
}
+const DefaultCacheSize = 1000
+const DefaultConnectionKeepAlivePingInterval = 25 * time.Second
+
func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc {
cfg := &Config{
- cacheSize: DefaultCacheSize,
+ cacheSize: DefaultCacheSize,
+ connectionKeepAlivePingInterval: DefaultConnectionKeepAlivePingInterval,
upgrader: websocket.Upgrader{
ReadBufferSize: 1024,
WriteBufferSize: 1024,
@@ -269,7 +281,7 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
var cache *lru.Cache
if cfg.cacheSize > 0 {
var err error
- cache, err = lru.New(DefaultCacheSize)
+ cache, err = lru.New(cfg.cacheSize)
if err != nil {
// An error is only returned for non-positive cache size
// and we already checked for that.
@@ -305,10 +317,11 @@ func (gh *graphqlHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
}
if strings.Contains(r.Header.Get("Upgrade"), "websocket") {
- connectWs(gh.exec, w, r, gh.cfg)
+ connectWs(gh.exec, w, r, gh.cfg, gh.cache)
return
}
+ w.Header().Set("Content-Type", "application/json")
var reqParams params
switch r.Method {
case http.MethodGet:
@@ -330,7 +343,6 @@ func (gh *graphqlHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusMethodNotAllowed)
return
}
- w.Header().Set("Content-Type", "application/json")
ctx := r.Context()
@@ -379,6 +391,10 @@ func (gh *graphqlHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
}
}()
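+ // A dynamic complexity limit, when configured, overrides the statically configured limit for this request.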
+ if gh.cfg.complexityLimitFunc != nil {
+ reqCtx.ComplexityLimit = gh.cfg.complexityLimitFunc(ctx)
+ }
+
if reqCtx.ComplexityLimit > 0 && reqCtx.OperationComplexity > reqCtx.ComplexityLimit {
sendErrorf(w, http.StatusUnprocessableEntity, "operation has complexity %d, which exceeds the limit of %d", reqCtx.OperationComplexity, reqCtx.ComplexityLimit)
return
diff --git a/vendor/github.com/99designs/gqlgen/handler/playground.go b/vendor/github.com/99designs/gqlgen/handler/playground.go
index f1687def..0e1ca768 100644
--- a/vendor/github.com/99designs/gqlgen/handler/playground.go
+++ b/vendor/github.com/99designs/gqlgen/handler/playground.go
@@ -11,9 +11,12 @@ var page = template.Must(template.New("graphiql").Parse(`<!DOCTYPE html>
<meta charset=utf-8/>
<meta name="viewport" content="user-scalable=no, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, minimal-ui">
<link rel="shortcut icon" href="https://graphcool-playground.netlify.com/favicon.png">
- <link rel="stylesheet" href="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/static/css/index.css"/>
- <link rel="shortcut icon" href="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/favicon.png"/>
- <script src="//cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/static/js/middleware.js"></script>
+ <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/static/css/index.css"
+ integrity="{{ .cssSRI }}" crossorigin="anonymous"/>
+ <link rel="shortcut icon" href="https://cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/favicon.png"
+ integrity="{{ .faviconSRI }}" crossorigin="anonymous"/>
+ <script src="https://cdn.jsdelivr.net/npm/graphql-playground-react@{{ .version }}/build/static/js/middleware.js"
+ integrity="{{ .jsSRI }}" crossorigin="anonymous"></script>
<title>{{.title}}</title>
</head>
<body>
@@ -42,10 +45,14 @@ var page = template.Must(template.New("graphiql").Parse(`<!DOCTYPE html>
func Playground(title string, endpoint string) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add("Content-Type", "text/html")
err := page.Execute(w, map[string]string{
- "title": title,
- "endpoint": endpoint,
- "version": "1.7.8",
+ "title": title,
+ "endpoint": endpoint,
+ "version": "1.7.20",
+ "cssSRI": "sha256-cS9Vc2OBt9eUf4sykRWukeFYaInL29+myBmFDSa7F/U=",
+ "faviconSRI": "sha256-GhTyE+McTU79R4+pRO6ih+4TfsTOrpPwD8ReKFzb3PM=",
+ "jsSRI": "sha256-4QG1Uza2GgGdlBL3RCBCGtGeZB6bDbsw8OltCMGeJsA=",
})
if err != nil {
panic(err)
diff --git a/vendor/github.com/99designs/gqlgen/handler/websocket.go b/vendor/github.com/99designs/gqlgen/handler/websocket.go
index 09800c17..58f38e5d 100644
--- a/vendor/github.com/99designs/gqlgen/handler/websocket.go
+++ b/vendor/github.com/99designs/gqlgen/handler/websocket.go
@@ -12,6 +12,7 @@ import (
"github.com/99designs/gqlgen/graphql"
"github.com/gorilla/websocket"
+ "github.com/hashicorp/golang-lru"
"github.com/vektah/gqlparser"
"github.com/vektah/gqlparser/ast"
"github.com/vektah/gqlparser/gqlerror"
@@ -44,12 +45,13 @@ type wsConnection struct {
active map[string]context.CancelFunc
mu sync.Mutex
cfg *Config
+ cache *lru.Cache
keepAliveTicker *time.Ticker
initPayload InitPayload
}
-func connectWs(exec graphql.ExecutableSchema, w http.ResponseWriter, r *http.Request, cfg *Config) {
+func connectWs(exec graphql.ExecutableSchema, w http.ResponseWriter, r *http.Request, cfg *Config, cache *lru.Cache) {
ws, err := cfg.upgrader.Upgrade(w, r, http.Header{
"Sec-Websocket-Protocol": []string{"graphql-ws"},
})
@@ -65,6 +67,7 @@ func connectWs(exec graphql.ExecutableSchema, w http.ResponseWriter, r *http.Req
conn: ws,
ctx: r.Context(),
cfg: cfg,
+ cache: cache,
}
if !conn.init() {
@@ -176,10 +179,27 @@ func (c *wsConnection) subscribe(message *operationMessage) bool {
return false
}
- doc, qErr := gqlparser.LoadQuery(c.exec.Schema(), reqParams.Query)
- if qErr != nil {
- c.sendError(message.ID, qErr...)
- return true
+ var (
+ doc *ast.QueryDocument
+ cacheHit bool
+ )
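+ // Reuse a previously parsed query document from the LRU cache when available, so identical subscription queries are not re-parsed and re-validated.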
+ if c.cache != nil {
+ val, ok := c.cache.Get(reqParams.Query)
+ if ok {
+ doc = val.(*ast.QueryDocument)
+ cacheHit = true
+ }
+ }
+ if !cacheHit {
+ var qErr gqlerror.List
+ doc, qErr = gqlparser.LoadQuery(c.exec.Schema(), reqParams.Query)
+ if qErr != nil {
+ c.sendError(message.ID, qErr...)
+ return true
+ }
+ if c.cache != nil {
+ c.cache.Add(reqParams.Query, doc)
+ }
}
op := doc.Operations.ForName(reqParams.OperationName)
diff --git a/vendor/github.com/99designs/gqlgen/internal/code/compare.go b/vendor/github.com/99designs/gqlgen/internal/code/compare.go
new file mode 100644
index 00000000..dce9aea5
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/internal/code/compare.go
@@ -0,0 +1,163 @@
+package code
+
+import (
+ "fmt"
+ "go/types"
+)
+
+// CompatibleTypes isn't a strict comparison; it allows for pointer differences
+func CompatibleTypes(expected types.Type, actual types.Type) error {
+ //fmt.Println("Comparing ", expected.String(), actual.String())
+
+ // Special case to deal with pointer mismatches
+ {
+ expectedPtr, expectedIsPtr := expected.(*types.Pointer)
+ actualPtr, actualIsPtr := actual.(*types.Pointer)
+
+ if expectedIsPtr && actualIsPtr {
+ return CompatibleTypes(expectedPtr.Elem(), actualPtr.Elem())
+ }
+ if expectedIsPtr && !actualIsPtr {
+ return CompatibleTypes(expectedPtr.Elem(), actual)
+ }
+ if !expectedIsPtr && actualIsPtr {
+ return CompatibleTypes(expected, actualPtr.Elem())
+ }
+ }
+
+ switch expected := expected.(type) {
+ case *types.Slice:
+ if actual, ok := actual.(*types.Slice); ok {
+ return CompatibleTypes(expected.Elem(), actual.Elem())
+ }
+
+ case *types.Array:
+ if actual, ok := actual.(*types.Array); ok {
+ if expected.Len() != actual.Len() {
+ return fmt.Errorf("array length differs")
+ }
+
+ return CompatibleTypes(expected.Elem(), actual.Elem())
+ }
+
+ case *types.Basic:
+ if actual, ok := actual.(*types.Basic); ok {
+ if actual.Kind() != expected.Kind() {
+ return fmt.Errorf("basic kind differs, %s != %s", expected.Name(), actual.Name())
+ }
+
+ return nil
+ }
+
+ case *types.Struct:
+ if actual, ok := actual.(*types.Struct); ok {
+ if expected.NumFields() != actual.NumFields() {
+ return fmt.Errorf("number of struct fields differ")
+ }
+
+ for i := 0; i < expected.NumFields(); i++ {
+ if expected.Field(i).Name() != actual.Field(i).Name() {
+ return fmt.Errorf("struct field %d name differs, %s != %s", i, expected.Field(i).Name(), actual.Field(i).Name())
+ }
+ if err := CompatibleTypes(expected.Field(i).Type(), actual.Field(i).Type()); err != nil {
+ return err
+ }
+ }
+ return nil
+ }
+
+ case *types.Tuple:
+ if actual, ok := actual.(*types.Tuple); ok {
+ if expected.Len() != actual.Len() {
+ return fmt.Errorf("tuple length differs, %d != %d", expected.Len(), actual.Len())
+ }
+
+ for i := 0; i < expected.Len(); i++ {
+ if err := CompatibleTypes(expected.At(i).Type(), actual.At(i).Type()); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+
+ case *types.Signature:
+ if actual, ok := actual.(*types.Signature); ok {
+ if err := CompatibleTypes(expected.Params(), actual.Params()); err != nil {
+ return err
+ }
+ if err := CompatibleTypes(expected.Results(), actual.Results()); err != nil {
+ return err
+ }
+
+ return nil
+ }
+ case *types.Interface:
+ if actual, ok := actual.(*types.Interface); ok {
+ if expected.NumMethods() != actual.NumMethods() {
+ return fmt.Errorf("interface method count differs, %d != %d", expected.NumMethods(), actual.NumMethods())
+ }
+
+ for i := 0; i < expected.NumMethods(); i++ {
+ if expected.Method(i).Name() != actual.Method(i).Name() {
+ return fmt.Errorf("interface method %d name differs, %s != %s", i, expected.Method(i).Name(), actual.Method(i).Name())
+ }
+ if err := CompatibleTypes(expected.Method(i).Type(), actual.Method(i).Type()); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+
+ case *types.Map:
+ if actual, ok := actual.(*types.Map); ok {
+ if err := CompatibleTypes(expected.Key(), actual.Key()); err != nil {
+ return err
+ }
+
+ if err := CompatibleTypes(expected.Elem(), actual.Elem()); err != nil {
+ return err
+ }
+
+ return nil
+ }
+
+ case *types.Chan:
+ if actual, ok := actual.(*types.Chan); ok {
+ return CompatibleTypes(expected.Elem(), actual.Elem())
+ }
+
+ case *types.Named:
+ if actual, ok := actual.(*types.Named); ok {
+ if NormalizeVendor(expected.Obj().Pkg().Path()) != NormalizeVendor(actual.Obj().Pkg().Path()) {
+ return fmt.Errorf(
+ "package name of named type differs, %s != %s",
+ NormalizeVendor(expected.Obj().Pkg().Path()),
+ NormalizeVendor(actual.Obj().Pkg().Path()),
+ )
+ }
+
+ if expected.Obj().Name() != actual.Obj().Name() {
+ return fmt.Errorf(
+ "named type name differs, %s != %s",
+ NormalizeVendor(expected.Obj().Name()),
+ NormalizeVendor(actual.Obj().Name()),
+ )
+ }
+
+ return nil
+ }
+
+ // Before models are generated, all missing references will be Invalid Basic references.
+ // Let's assume these are valid too.
+ if actual, ok := actual.(*types.Basic); ok && actual.Kind() == types.Invalid {
+ return nil
+ }
+
+ default:
+ return fmt.Errorf("missing support for %T", expected)
+ }
+
+ return fmt.Errorf("type mismatch %T != %T", expected, actual)
+}
diff --git a/vendor/github.com/99designs/gqlgen/internal/code/imports.go b/vendor/github.com/99designs/gqlgen/internal/code/imports.go
new file mode 100644
index 00000000..2384e87d
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/internal/code/imports.go
@@ -0,0 +1,60 @@
+package code
+
+import (
+ "errors"
+ "path/filepath"
+ "sync"
+
+ "golang.org/x/tools/go/packages"
+)
+
+var pathForDirCache = sync.Map{}
+
+// ImportPathForDir takes an *absolute* path and returns a Go import path for the package, or an empty string if it cannot be determined
+func ImportPathForDir(dir string) string {
+ if v, ok := pathForDirCache.Load(dir); ok {
+ return v.(string)
+ }
+
+ p, _ := packages.Load(&packages.Config{
+ Dir: dir,
+ }, ".")
+
+ // If the dir doesn't exist yet, keep walking up the directory tree trying to find a match
+ if len(p) != 1 {
+ parent, err := filepath.Abs(filepath.Join(dir, ".."))
+ if err != nil {
+ panic(err)
+ }
+ // Walked all the way to the root and didn't find anything :'(
+ if parent == dir {
+ return ""
+ }
+ return ImportPathForDir(parent) + "/" + filepath.Base(dir)
+ }
+
+ pathForDirCache.Store(dir, p[0].PkgPath)
+
+ return p[0].PkgPath
+}
+
+var nameForPackageCache = sync.Map{}
+
+func NameForPackage(importPath string) string {
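+// NameForPackage resolves the Go package name for an import path, falling back to a sanitized base directory name when the package cannot be loaded.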
+ if importPath == "" {
+ panic(errors.New("import path can not be empty"))
+ }
+ if v, ok := nameForPackageCache.Load(importPath); ok {
+ return v.(string)
+ }
+ importPath = QualifyPackagePath(importPath)
+ p, _ := packages.Load(nil, importPath)
+
+ if len(p) != 1 || p[0].Name == "" {
+ return SanitizePackageName(filepath.Base(importPath))
+ }
+
+ nameForPackageCache.Store(importPath, p[0].Name)
+
+ return p[0].Name
+}
diff --git a/vendor/github.com/99designs/gqlgen/internal/code/util.go b/vendor/github.com/99designs/gqlgen/internal/code/util.go
new file mode 100644
index 00000000..2be83a23
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/internal/code/util.go
@@ -0,0 +1,56 @@
+package code
+
+import (
+ "go/build"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
+// take a string in the form github.com/package/blah.Type and split it into package and type
+func PkgAndType(name string) (string, string) {
+ parts := strings.Split(name, ".")
+ if len(parts) == 1 {
+ return "", name
+ }
+
+ return strings.Join(parts[:len(parts)-1], "."), parts[len(parts)-1]
+}
+
+var modsRegex = regexp.MustCompile(`^(\*|\[\])*`)
+
+// NormalizeVendor takes a qualified package path and turns it into a normal one.
+// e.g.
+// github.com/foo/vendor/github.com/99designs/gqlgen/graphql becomes
+// github.com/99designs/gqlgen/graphql
+func NormalizeVendor(pkg string) string {
+ modifiers := modsRegex.FindAllString(pkg, 1)[0]
+ pkg = strings.TrimPrefix(pkg, modifiers)
+ parts := strings.Split(pkg, "/vendor/")
+ return modifiers + parts[len(parts)-1]
+}
+
+// QualifyPackagePath takes an import and fully qualifies it with a vendor dir, if one is required.
+// e.g.
+// github.com/99designs/gqlgen/graphql becomes
+// github.com/foo/vendor/github.com/99designs/gqlgen/graphql
+//
+// x/tools/packages only supports 'qualified package paths' so this will need to be done prior to calling it
+// See https://github.com/golang/go/issues/30289
+func QualifyPackagePath(importPath string) string {
+ wd, _ := os.Getwd()
+
+ pkg, err := build.Import(importPath, wd, 0)
+ if err != nil {
+ return importPath
+ }
+
+ return pkg.ImportPath
+}
+
+var invalidPackageNameChar = regexp.MustCompile(`[^\w]`)
+
+func SanitizePackageName(pkg string) string {
+ return invalidPackageNameChar.ReplaceAllLiteralString(filepath.Base(pkg), "_")
+}
diff --git a/vendor/github.com/99designs/gqlgen/internal/gopath/gopath.go b/vendor/github.com/99designs/gqlgen/internal/gopath/gopath.go
deleted file mode 100644
index c9b66167..00000000
--- a/vendor/github.com/99designs/gqlgen/internal/gopath/gopath.go
+++ /dev/null
@@ -1,37 +0,0 @@
-package gopath
-
-import (
- "fmt"
- "go/build"
- "path/filepath"
- "strings"
-)
-
-var NotFound = fmt.Errorf("not on GOPATH")
-
-// Contains returns true if the given directory is in the GOPATH
-func Contains(dir string) bool {
- _, err := Dir2Import(dir)
- return err == nil
-}
-
-// Dir2Import takes an *absolute* path and returns a golang import path for the package, and returns an error if it isn't on the gopath
-func Dir2Import(dir string) (string, error) {
- dir = filepath.ToSlash(dir)
- for _, gopath := range filepath.SplitList(build.Default.GOPATH) {
- gopath = filepath.ToSlash(filepath.Join(gopath, "src"))
- if len(gopath) < len(dir) && strings.EqualFold(gopath, dir[0:len(gopath)]) {
- return dir[len(gopath)+1:], nil
- }
- }
- return "", NotFound
-}
-
-// MustDir2Import takes an *absolute* path and returns a golang import path for the package, and panics if it isn't on the gopath
-func MustDir2Import(dir string) string {
- pkg, err := Dir2Import(dir)
- if err != nil {
- panic(err)
- }
- return pkg
-}
diff --git a/vendor/github.com/99designs/gqlgen/internal/imports/prune.go b/vendor/github.com/99designs/gqlgen/internal/imports/prune.go
index d2469e83..d678870e 100644
--- a/vendor/github.com/99designs/gqlgen/internal/imports/prune.go
+++ b/vendor/github.com/99designs/gqlgen/internal/imports/prune.go
@@ -5,16 +5,15 @@ package imports
import (
"bytes"
"go/ast"
- "go/build"
"go/parser"
"go/printer"
"go/token"
- "path/filepath"
"strings"
- "golang.org/x/tools/imports"
+ "github.com/99designs/gqlgen/internal/code"
"golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/imports"
)
type visitFn func(node ast.Node)
@@ -54,12 +53,6 @@ func getUnusedImports(file ast.Node, filename string) (map[string]string, error)
imported := map[string]*ast.ImportSpec{}
used := map[string]bool{}
- abs, err := filepath.Abs(filename)
- if err != nil {
- return nil, err
- }
- srcDir := filepath.Dir(abs)
-
ast.Walk(visitFn(func(node ast.Node) {
if node == nil {
return
@@ -75,7 +68,7 @@ func getUnusedImports(file ast.Node, filename string) (map[string]string, error)
break
}
- local := importPathToName(ipath, srcDir)
+ local := code.NameForPackage(ipath)
imported[local] = v
case *ast.SelectorExpr:
@@ -108,12 +101,3 @@ func getUnusedImports(file ast.Node, filename string) (map[string]string, error)
return unusedImport, nil
}
-
-func importPathToName(importPath, srcDir string) (packageName string) {
- pkg, err := build.Default.Import(importPath, srcDir, 0)
- if err != nil {
- return ""
- }
-
- return pkg.Name
-}
diff --git a/vendor/github.com/99designs/gqlgen/plugin/modelgen/models.go b/vendor/github.com/99designs/gqlgen/plugin/modelgen/models.go
new file mode 100644
index 00000000..508cc14d
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/plugin/modelgen/models.go
@@ -0,0 +1,207 @@
+package modelgen
+
+import (
+ "go/types"
+ "sort"
+
+ "github.com/99designs/gqlgen/codegen/config"
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/99designs/gqlgen/internal/code"
+ "github.com/99designs/gqlgen/plugin"
+ "github.com/vektah/gqlparser/ast"
+)
+
+type ModelBuild struct {
+ PackageName string
+ Interfaces []*Interface
+ Models []*Object
+ Enums []*Enum
+ Scalars []string
+}
+
+type Interface struct {
+ Description string
+ Name string
+}
+
+type Object struct {
+ Description string
+ Name string
+ Fields []*Field
+ Implements []string
+}
+
+type Field struct {
+ Description string
+ Name string
+ Type types.Type
+ Tag string
+}
+
+type Enum struct {
+ Description string
+ Name string
+ Values []*EnumValue
+}
+
+type EnumValue struct {
+ Description string
+ Name string
+}
+
+func New() plugin.Plugin {
+ return &Plugin{}
+}
+
+type Plugin struct{}
+
+var _ plugin.ConfigMutator = &Plugin{}
+
+func (m *Plugin) Name() string {
+ return "modelgen"
+}
+
+func (m *Plugin) MutateConfig(cfg *config.Config) error {
+ if err := cfg.Check(); err != nil {
+ return err
+ }
+
+ schema, _, err := cfg.LoadSchema()
+ if err != nil {
+ return err
+ }
+
+ cfg.InjectBuiltins(schema)
+
+ binder, err := cfg.NewBinder(schema)
+ if err != nil {
+ return err
+ }
+
+ b := &ModelBuild{
+ PackageName: cfg.Model.Package,
+ }
+
+ for _, schemaType := range schema.Types {
+ if cfg.Models.UserDefined(schemaType.Name) {
+ continue
+ }
+
+ switch schemaType.Kind {
+ case ast.Interface, ast.Union:
+ it := &Interface{
+ Description: schemaType.Description,
+ Name: schemaType.Name,
+ }
+
+ b.Interfaces = append(b.Interfaces, it)
+ case ast.Object, ast.InputObject:
+ if schemaType == schema.Query || schemaType == schema.Mutation || schemaType == schema.Subscription {
+ continue
+ }
+ it := &Object{
+ Description: schemaType.Description,
+ Name: schemaType.Name,
+ }
+
+ for _, implementor := range schema.GetImplements(schemaType) {
+ it.Implements = append(it.Implements, implementor.Name)
+ }
+
+ for _, field := range schemaType.Fields {
+ var typ types.Type
+
+ if cfg.Models.UserDefined(field.Type.Name()) {
+ pkg, typeName := code.PkgAndType(cfg.Models[field.Type.Name()].Model[0])
+ typ, err = binder.FindType(pkg, typeName)
+ if err != nil {
+ return err
+ }
+ } else {
+ fieldDef := schema.Types[field.Type.Name()]
+ switch fieldDef.Kind {
+ case ast.Scalar:
+ // no user defined model, referencing a default scalar
+ typ = types.NewNamed(
+ types.NewTypeName(0, cfg.Model.Pkg(), "string", nil),
+ nil,
+ nil,
+ )
+ case ast.Interface, ast.Union:
+ // no user defined model, referencing a generated interface type
+ typ = types.NewNamed(
+ types.NewTypeName(0, cfg.Model.Pkg(), templates.ToGo(field.Type.Name()), nil),
+ types.NewInterfaceType([]*types.Func{}, []types.Type{}),
+ nil,
+ )
+ default:
+ // no user defined model, must reference another generated model
+ typ = types.NewNamed(
+ types.NewTypeName(0, cfg.Model.Pkg(), templates.ToGo(field.Type.Name()), nil),
+ nil,
+ nil,
+ )
+ }
+ }
+
+ name := field.Name
+ if nameOveride := cfg.Models[schemaType.Name].Fields[field.Name].FieldName; nameOveride != "" {
+ name = nameOveride
+ }
+
+ it.Fields = append(it.Fields, &Field{
+ Name: name,
+ Type: binder.CopyModifiersFromAst(field.Type, typ),
+ Description: field.Description,
+ Tag: `json:"` + field.Name + `"`,
+ })
+ }
+
+ b.Models = append(b.Models, it)
+ case ast.Enum:
+ it := &Enum{
+ Name: schemaType.Name,
+ Description: schemaType.Description,
+ }
+
+ for _, v := range schemaType.EnumValues {
+ it.Values = append(it.Values, &EnumValue{
+ Name: v.Name,
+ Description: v.Description,
+ })
+ }
+
+ b.Enums = append(b.Enums, it)
+ case ast.Scalar:
+ b.Scalars = append(b.Scalars, schemaType.Name)
+ }
+ }
+
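+ // Sort the generated types for deterministic output, then register them back into the config so later codegen steps can resolve them.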
+ sort.Slice(b.Enums, func(i, j int) bool { return b.Enums[i].Name < b.Enums[j].Name })
+ sort.Slice(b.Models, func(i, j int) bool { return b.Models[i].Name < b.Models[j].Name })
+ sort.Slice(b.Interfaces, func(i, j int) bool { return b.Interfaces[i].Name < b.Interfaces[j].Name })
+
+ for _, it := range b.Enums {
+ cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGo(it.Name))
+ }
+ for _, it := range b.Models {
+ cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGo(it.Name))
+ }
+ for _, it := range b.Interfaces {
+ cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGo(it.Name))
+ }
+ for _, it := range b.Scalars {
+ cfg.Models.Add(it, "github.com/99designs/gqlgen/graphql.String")
+ }
+
+ if len(b.Models) == 0 && len(b.Enums) == 0 {
+ return nil
+ }
+
+ return templates.Render(templates.Options{
+ PackageName: cfg.Model.Package,
+ Filename: cfg.Model.Filename,
+ Data: b,
+ GeneratedHeader: true,
+ })
+}
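
A minimal sketch of invoking this mutator on its own, assuming a gqlgen.yml in the working directory and the package's New constructor; in normal use the api package runs it as part of the full plugin chain.

package main

import (
	"log"

	"github.com/99designs/gqlgen/codegen/config"
	"github.com/99designs/gqlgen/plugin"
	"github.com/99designs/gqlgen/plugin/modelgen"
)

func main() {
	cfg, err := config.LoadConfig("gqlgen.yml")
	if err != nil {
		log.Fatal(err)
	}
	// MutateConfig writes the generated models file and registers every
	// generated type in cfg.Models so later codegen stages can bind to it.
	if mutator, ok := modelgen.New().(plugin.ConfigMutator); ok {
		if err := mutator.MutateConfig(cfg); err != nil {
			log.Fatal(err)
		}
	}
}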
diff --git a/vendor/github.com/99designs/gqlgen/plugin/modelgen/models.gotpl b/vendor/github.com/99designs/gqlgen/plugin/modelgen/models.gotpl
new file mode 100644
index 00000000..d06cf050
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/plugin/modelgen/models.gotpl
@@ -0,0 +1,85 @@
+{{ reserveImport "context" }}
+{{ reserveImport "fmt" }}
+{{ reserveImport "io" }}
+{{ reserveImport "strconv" }}
+{{ reserveImport "time" }}
+{{ reserveImport "sync" }}
+{{ reserveImport "errors" }}
+{{ reserveImport "bytes" }}
+
+{{ reserveImport "github.com/vektah/gqlparser" }}
+{{ reserveImport "github.com/vektah/gqlparser/ast" }}
+{{ reserveImport "github.com/99designs/gqlgen/graphql" }}
+{{ reserveImport "github.com/99designs/gqlgen/graphql/introspection" }}
+
+{{- range $model := .Interfaces }}
+ {{ with .Description }} {{.|prefixLines "// "}} {{ end }}
+ type {{.Name|go }} interface {
+ Is{{.Name|go }}()
+ }
+{{- end }}
+
+{{ range $model := .Models }}
+ {{with .Description }} {{.|prefixLines "// "}} {{end}}
+ type {{ .Name|go }} struct {
+ {{- range $field := .Fields }}
+ {{- with .Description }}
+ {{.|prefixLines "// "}}
+ {{- end}}
+ {{ $field.Name|go }} {{$field.Type | ref}} `{{$field.Tag}}`
+ {{- end }}
+ }
+
+ {{- range $iface := .Implements }}
+ func ({{ $model.Name|go }}) Is{{ $iface }}() {}
+ {{- end }}
+{{- end}}
+
+{{ range $enum := .Enums }}
+ {{ with .Description|go }} {{.|prefixLines "// "}} {{end}}
+ type {{.Name|go }} string
+ const (
+ {{- range $value := .Values}}
+ {{- with .Description}}
+ {{.|prefixLines "// "}}
+ {{- end}}
+ {{ $enum.Name|go }}{{ .Name|go }} {{$enum.Name|go }} = {{.Name|quote}}
+ {{- end }}
+ )
+
+ var All{{.Name|go }} = []{{ .Name|go }}{
+ {{- range $value := .Values}}
+ {{$enum.Name|go }}{{ .Name|go }},
+ {{- end }}
+ }
+
+ func (e {{.Name|go }}) IsValid() bool {
+ switch e {
+ case {{ range $index, $element := .Values}}{{if $index}},{{end}}{{ $enum.Name|go }}{{ $element.Name|go }}{{end}}:
+ return true
+ }
+ return false
+ }
+
+ func (e {{.Name|go }}) String() string {
+ return string(e)
+ }
+
+ func (e *{{.Name|go }}) UnmarshalGQL(v interface{}) error {
+ str, ok := v.(string)
+ if !ok {
+ return fmt.Errorf("enums must be strings")
+ }
+
+ *e = {{ .Name|go }}(str)
+ if !e.IsValid() {
+ return fmt.Errorf("%s is not a valid {{ .Name }}", str)
+ }
+ return nil
+ }
+
+ func (e {{.Name|go }}) MarshalGQL(w io.Writer) {
+ fmt.Fprint(w, strconv.Quote(e.String()))
+ }
+
+{{- end }}
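
For a hypothetical schema enum "Status" with values OPEN and CLOSED, the enum block of this template renders roughly the following Go (a sketch; exact formatting is handled by the templates package):

package models

import (
	"fmt"
	"io"
	"strconv"
)

type Status string

const (
	StatusOpen   Status = "OPEN"
	StatusClosed Status = "CLOSED"
)

var AllStatus = []Status{StatusOpen, StatusClosed}

func (e Status) IsValid() bool {
	switch e {
	case StatusOpen, StatusClosed:
		return true
	}
	return false
}

func (e Status) String() string { return string(e) }

// UnmarshalGQL accepts only strings that name a declared enum value.
func (e *Status) UnmarshalGQL(v interface{}) error {
	str, ok := v.(string)
	if !ok {
		return fmt.Errorf("enums must be strings")
	}
	*e = Status(str)
	if !e.IsValid() {
		return fmt.Errorf("%s is not a valid Status", str)
	}
	return nil
}

func (e Status) MarshalGQL(w io.Writer) {
	fmt.Fprint(w, strconv.Quote(e.String()))
}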
diff --git a/vendor/github.com/99designs/gqlgen/plugin/plugin.go b/vendor/github.com/99designs/gqlgen/plugin/plugin.go
new file mode 100644
index 00000000..a84bfd32
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/plugin/plugin.go
@@ -0,0 +1,20 @@
+// plugin package interfaces are EXPERIMENTAL.
+
+package plugin
+
+import (
+ "github.com/99designs/gqlgen/codegen"
+ "github.com/99designs/gqlgen/codegen/config"
+)
+
+type Plugin interface {
+ Name() string
+}
+
+type ConfigMutator interface {
+ MutateConfig(cfg *config.Config) error
+}
+
+type CodeGenerator interface {
+ GenerateCode(cfg *codegen.Data) error
+}
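
A sketch of a third-party plugin built on these interfaces, assuming the api.AddPlugin option added elsewhere in this change; the plugin name and log output are illustrative only.

package main

import (
	"log"

	"github.com/99designs/gqlgen/api"
	"github.com/99designs/gqlgen/codegen"
	"github.com/99designs/gqlgen/codegen/config"
	"github.com/99designs/gqlgen/plugin"
)

// auditPlugin only inspects the final codegen data; it generates nothing.
type auditPlugin struct{}

var _ plugin.CodeGenerator = auditPlugin{}

func (auditPlugin) Name() string { return "audit" }

func (auditPlugin) GenerateCode(data *codegen.Data) error {
	log.Printf("schema has %d object types", len(data.Objects))
	return nil
}

func main() {
	cfg, err := config.LoadConfigFromDefaultLocations()
	if err != nil {
		log.Fatal(err)
	}
	// AddPlugin appends the plugin to the default chain (modelgen, resolvergen, ...).
	if err := api.Generate(cfg, api.AddPlugin(auditPlugin{})); err != nil {
		log.Fatal(err)
	}
}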
diff --git a/vendor/github.com/99designs/gqlgen/plugin/resolvergen/resolver.go b/vendor/github.com/99designs/gqlgen/plugin/resolvergen/resolver.go
new file mode 100644
index 00000000..00a6d5c9
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/plugin/resolvergen/resolver.go
@@ -0,0 +1,53 @@
+package resolvergen
+
+import (
+ "log"
+ "os"
+
+ "github.com/99designs/gqlgen/codegen"
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/99designs/gqlgen/plugin"
+ "github.com/pkg/errors"
+)
+
+func New() plugin.Plugin {
+ return &Plugin{}
+}
+
+type Plugin struct{}
+
+var _ plugin.CodeGenerator = &Plugin{}
+
+func (m *Plugin) Name() string {
+ return "resovlergen"
+}
+func (m *Plugin) GenerateCode(data *codegen.Data) error {
+ if !data.Config.Resolver.IsDefined() {
+ return nil
+ }
+
+ resolverBuild := &ResolverBuild{
+ Data: data,
+ PackageName: data.Config.Resolver.Package,
+ ResolverType: data.Config.Resolver.Type,
+ }
+ filename := data.Config.Resolver.Filename
+
+ if _, err := os.Stat(filename); os.IsNotExist(errors.Cause(err)) {
+ return templates.Render(templates.Options{
+ PackageName: data.Config.Resolver.Package,
+ Filename: data.Config.Resolver.Filename,
+ Data: resolverBuild,
+ })
+ }
+
+ log.Printf("Skipped resolver: %s already exists\n", filename)
+ return nil
+}
+
+type ResolverBuild struct {
+ *codegen.Data
+
+ PackageName string
+ ResolverType string
+}
diff --git a/vendor/github.com/99designs/gqlgen/plugin/resolvergen/resolver.gotpl b/vendor/github.com/99designs/gqlgen/plugin/resolvergen/resolver.gotpl
new file mode 100644
index 00000000..7d95e690
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/plugin/resolvergen/resolver.gotpl
@@ -0,0 +1,40 @@
+// THIS CODE IS A STARTING POINT ONLY. IT WILL NOT BE UPDATED WITH SCHEMA CHANGES.
+
+{{ reserveImport "context" }}
+{{ reserveImport "fmt" }}
+{{ reserveImport "io" }}
+{{ reserveImport "strconv" }}
+{{ reserveImport "time" }}
+{{ reserveImport "sync" }}
+{{ reserveImport "errors" }}
+{{ reserveImport "bytes" }}
+
+{{ reserveImport "github.com/99designs/gqlgen/handler" }}
+{{ reserveImport "github.com/vektah/gqlparser" }}
+{{ reserveImport "github.com/vektah/gqlparser/ast" }}
+{{ reserveImport "github.com/99designs/gqlgen/graphql" }}
+{{ reserveImport "github.com/99designs/gqlgen/graphql/introspection" }}
+
+type {{.ResolverType}} struct {}
+
+{{ range $object := .Objects -}}
+ {{- if $object.HasResolvers -}}
+ func (r *{{$.ResolverType}}) {{$object.Name}}() {{ $object.ResolverInterface | ref }} {
+ return &{{lcFirst $object.Name}}Resolver{r}
+ }
+ {{ end -}}
+{{ end }}
+
+{{ range $object := .Objects -}}
+ {{- if $object.HasResolvers -}}
+ type {{lcFirst $object.Name}}Resolver struct { *Resolver }
+
+ {{ range $field := $object.Fields -}}
+ {{- if $field.IsResolver -}}
+ func (r *{{lcFirst $object.Name}}Resolver) {{$field.GoFieldName}}{{ $field.ShortResolverDeclaration }} {
+ panic("not implemented")
+ }
+ {{ end -}}
+ {{ end -}}
+ {{ end -}}
+{{ end }}
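
For a hypothetical Todo object whose "comments" field has a resolver, the template above renders stubs along these lines; the surrounding types are stubbed here only so the sketch is self-contained.

package resolvers

import "context"

type Todo struct{ ID string }
type Comment struct{ Body string }

// TodoResolver stands in for the generated $object.ResolverInterface.
type TodoResolver interface {
	Comments(ctx context.Context, obj *Todo) ([]*Comment, error)
}

type Resolver struct{}

func (r *Resolver) Todo() TodoResolver {
	return &todoResolver{r}
}

type todoResolver struct{ *Resolver }

func (r *todoResolver) Comments(ctx context.Context, obj *Todo) ([]*Comment, error) {
	panic("not implemented")
}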
diff --git a/vendor/github.com/99designs/gqlgen/plugin/servergen/server.go b/vendor/github.com/99designs/gqlgen/plugin/servergen/server.go
new file mode 100644
index 00000000..22289c02
--- /dev/null
+++ b/vendor/github.com/99designs/gqlgen/plugin/servergen/server.go
@@ -0,0 +1,49 @@
+package servergen
+
+import (
+ "log"
+ "os"
+
+ "github.com/99designs/gqlgen/codegen"
+ "github.com/99designs/gqlgen/codegen/templates"
+ "github.com/99designs/gqlgen/plugin"
+ "github.com/pkg/errors"
+)
+
+func New(filename string) plugin.Plugin {
+ return &Plugin{filename}
+}
+
+type Plugin struct {
+ filename string
+}
+
+var _ plugin.CodeGenerator = &Plugin{}
+
+func (m *Plugin) Name() string {
+ return "servergen"
+}
+func (m *Plugin) GenerateCode(data *codegen.Data) error {
+ serverBuild := &ServerBuild{
+ ExecPackageName: data.Config.Exec.ImportPath(),
+ ResolverPackageName: data.Config.Resolver.ImportPath(),
+ }
+
+ if _, err := os.Stat(m.filename); os.IsNotExist(errors.Cause(err)) {
+ return templates.Render(templates.Options{
+ PackageName: "main",
+ Filename: m.filename,
+ Data: serverBuild,
+ })
+ }
+
+ log.Printf("Skipped server: %s already exists\n", m.filename)
+ return nil
+}
+
+type ServerBuild struct {
+ codegen.Data
+
+ ExecPackageName string
+ ResolverPackageName string
+}
diff --git a/vendor/github.com/99designs/gqlgen/codegen/templates/server.gotpl b/vendor/github.com/99designs/gqlgen/plugin/servergen/server.gotpl
index 38dc0d18..fca71c53 100644
--- a/vendor/github.com/99designs/gqlgen/codegen/templates/server.gotpl
+++ b/vendor/github.com/99designs/gqlgen/plugin/servergen/server.gotpl
@@ -1,14 +1,8 @@
-package main
-
-import (
- %%%IMPORTS%%%
-
- {{ reserveImport "context" }}
- {{ reserveImport "log" }}
- {{ reserveImport "net/http" }}
- {{ reserveImport "os" }}
- {{ reserveImport "github.com/99designs/gqlgen/handler" }}
-)
+{{ reserveImport "context" }}
+{{ reserveImport "log" }}
+{{ reserveImport "net/http" }}
+{{ reserveImport "os" }}
+{{ reserveImport "github.com/99designs/gqlgen/handler" }}
const defaultPort = "8080"
diff --git a/vendor/github.com/theckman/goconstraint/go1.8/gte/constraint.go b/vendor/github.com/theckman/goconstraint/go1.8/gte/constraint.go
deleted file mode 100644
index 24c248d6..00000000
--- a/vendor/github.com/theckman/goconstraint/go1.8/gte/constraint.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// The contents of this file has been released in to the Public Domain.
-
-// Package gtego18 should only be used as a blank import. If imported, it
-// will only compile if the Go runtime version is >= 1.8.
-package gtego18
-
-// This will fail to compile if the Go runtime version isn't >= 1.8.
-var _ = __SOFTWARE_REQUIRES_GO_VERSION_1_8__
diff --git a/vendor/github.com/theckman/goconstraint/go1.8/gte/go18.go b/vendor/github.com/theckman/goconstraint/go1.8/gte/go18.go
deleted file mode 100644
index 777fea4d..00000000
--- a/vendor/github.com/theckman/goconstraint/go1.8/gte/go18.go
+++ /dev/null
@@ -1,7 +0,0 @@
-// The contents of this file has been released in to the Public Domain.
-
-// +build go1.8
-
-package gtego18
-
-const __SOFTWARE_REQUIRES_GO_VERSION_1_8__ = uint8(0)
diff --git a/vendor/github.com/theckman/goconstraint/go1.9/gte/constraint.go b/vendor/github.com/theckman/goconstraint/go1.9/gte/constraint.go
new file mode 100644
index 00000000..ed67fefc
--- /dev/null
+++ b/vendor/github.com/theckman/goconstraint/go1.9/gte/constraint.go
@@ -0,0 +1,8 @@
+// The contents of this file have been released into the Public Domain.
+
+// Package gtego19 should only be used as a blank import. If imported, it
+// will only compile if the Go runtime version is >= 1.9.
+package gtego19
+
+// This will fail to compile if the Go runtime version isn't >= 1.9.
+var _ = __SOFTWARE_REQUIRES_GO_VERSION_1_9__
diff --git a/vendor/github.com/theckman/goconstraint/go1.9/gte/go19.go b/vendor/github.com/theckman/goconstraint/go1.9/gte/go19.go
new file mode 100644
index 00000000..446a5ee3
--- /dev/null
+++ b/vendor/github.com/theckman/goconstraint/go1.9/gte/go19.go
@@ -0,0 +1,7 @@
+// The contents of this file have been released into the Public Domain.
+
+// +build go1.9
+
+package gtego19
+
+const __SOFTWARE_REQUIRES_GO_VERSION_1_9__ = uint8(0)
diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
deleted file mode 100644
index c0cb03e7..00000000
--- a/vendor/golang.org/x/tools/go/buildutil/allpackages.go
+++ /dev/null
@@ -1,198 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package buildutil provides utilities related to the go/build
-// package in the standard library.
-//
-// All I/O is done via the build.Context file system interface, which must
-// be concurrency-safe.
-package buildutil // import "golang.org/x/tools/go/buildutil"
-
-import (
- "go/build"
- "os"
- "path/filepath"
- "sort"
- "strings"
- "sync"
-)
-
-// AllPackages returns the package path of each Go package in any source
-// directory of the specified build context (e.g. $GOROOT or an element
-// of $GOPATH). Errors are ignored. The results are sorted.
-// All package paths are canonical, and thus may contain "/vendor/".
-//
-// The result may include import paths for directories that contain no
-// *.go files, such as "archive" (in $GOROOT/src).
-//
-// All I/O is done via the build.Context file system interface,
-// which must be concurrency-safe.
-//
-func AllPackages(ctxt *build.Context) []string {
- var list []string
- ForEachPackage(ctxt, func(pkg string, _ error) {
- list = append(list, pkg)
- })
- sort.Strings(list)
- return list
-}
-
-// ForEachPackage calls the found function with the package path of
-// each Go package it finds in any source directory of the specified
-// build context (e.g. $GOROOT or an element of $GOPATH).
-// All package paths are canonical, and thus may contain "/vendor/".
-//
-// If the package directory exists but could not be read, the second
-// argument to the found function provides the error.
-//
-// All I/O is done via the build.Context file system interface,
-// which must be concurrency-safe.
-//
-func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) {
- ch := make(chan item)
-
- var wg sync.WaitGroup
- for _, root := range ctxt.SrcDirs() {
- root := root
- wg.Add(1)
- go func() {
- allPackages(ctxt, root, ch)
- wg.Done()
- }()
- }
- go func() {
- wg.Wait()
- close(ch)
- }()
-
- // All calls to found occur in the caller's goroutine.
- for i := range ch {
- found(i.importPath, i.err)
- }
-}
-
-type item struct {
- importPath string
- err error // (optional)
-}
-
-// We use a process-wide counting semaphore to limit
-// the number of parallel calls to ReadDir.
-var ioLimit = make(chan bool, 20)
-
-func allPackages(ctxt *build.Context, root string, ch chan<- item) {
- root = filepath.Clean(root) + string(os.PathSeparator)
-
- var wg sync.WaitGroup
-
- var walkDir func(dir string)
- walkDir = func(dir string) {
- // Avoid .foo, _foo, and testdata directory trees.
- base := filepath.Base(dir)
- if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" {
- return
- }
-
- pkg := filepath.ToSlash(strings.TrimPrefix(dir, root))
-
- // Prune search if we encounter any of these import paths.
- switch pkg {
- case "builtin":
- return
- }
-
- ioLimit <- true
- files, err := ReadDir(ctxt, dir)
- <-ioLimit
- if pkg != "" || err != nil {
- ch <- item{pkg, err}
- }
- for _, fi := range files {
- fi := fi
- if fi.IsDir() {
- wg.Add(1)
- go func() {
- walkDir(filepath.Join(dir, fi.Name()))
- wg.Done()
- }()
- }
- }
- }
-
- walkDir(root)
- wg.Wait()
-}
-
-// ExpandPatterns returns the set of packages matched by patterns,
-// which may have the following forms:
-//
-// golang.org/x/tools/cmd/guru # a single package
-// golang.org/x/tools/... # all packages beneath dir
-// ... # the entire workspace.
-//
-// Order is significant: a pattern preceded by '-' removes matching
-// packages from the set. For example, these patterns match all encoding
-// packages except encoding/xml:
-//
-// encoding/... -encoding/xml
-//
-// A trailing slash in a pattern is ignored. (Path components of Go
-// package names are separated by slash, not the platform's path separator.)
-//
-func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool {
- // TODO(adonovan): support other features of 'go list':
- // - "std"/"cmd"/"all" meta-packages
- // - "..." not at the end of a pattern
- // - relative patterns using "./" or "../" prefix
-
- pkgs := make(map[string]bool)
- doPkg := func(pkg string, neg bool) {
- if neg {
- delete(pkgs, pkg)
- } else {
- pkgs[pkg] = true
- }
- }
-
- // Scan entire workspace if wildcards are present.
- // TODO(adonovan): opt: scan only the necessary subtrees of the workspace.
- var all []string
- for _, arg := range patterns {
- if strings.HasSuffix(arg, "...") {
- all = AllPackages(ctxt)
- break
- }
- }
-
- for _, arg := range patterns {
- if arg == "" {
- continue
- }
-
- neg := arg[0] == '-'
- if neg {
- arg = arg[1:]
- }
-
- if arg == "..." {
- // ... matches all packages
- for _, pkg := range all {
- doPkg(pkg, neg)
- }
- } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg {
- // dir/... matches all packages beneath dir
- for _, pkg := range all {
- if strings.HasPrefix(pkg, dir) &&
- (len(pkg) == len(dir) || pkg[len(dir)] == '/') {
- doPkg(pkg, neg)
- }
- }
- } else {
- // single package
- doPkg(strings.TrimSuffix(arg, "/"), neg)
- }
- }
-
- return pkgs
-}
diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
deleted file mode 100644
index 8b7f0667..00000000
--- a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
+++ /dev/null
@@ -1,109 +0,0 @@
-package buildutil
-
-import (
- "fmt"
- "go/build"
- "io"
- "io/ioutil"
- "os"
- "path"
- "path/filepath"
- "sort"
- "strings"
- "time"
-)
-
-// FakeContext returns a build.Context for the fake file tree specified
-// by pkgs, which maps package import paths to a mapping from file base
-// names to contents.
-//
-// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides
-// the necessary file access methods to read from memory instead of the
-// real file system.
-//
-// Unlike a real file tree, the fake one has only two levels---packages
-// and files---so ReadDir("/go/src/") returns all packages under
-// /go/src/ including, for instance, "math" and "math/big".
-// ReadDir("/go/src/math/big") would return all the files in the
-// "math/big" package.
-//
-func FakeContext(pkgs map[string]map[string]string) *build.Context {
- clean := func(filename string) string {
- f := path.Clean(filepath.ToSlash(filename))
- // Removing "/go/src" while respecting segment
- // boundaries has this unfortunate corner case:
- if f == "/go/src" {
- return ""
- }
- return strings.TrimPrefix(f, "/go/src/")
- }
-
- ctxt := build.Default // copy
- ctxt.GOROOT = "/go"
- ctxt.GOPATH = ""
- ctxt.Compiler = "gc"
- ctxt.IsDir = func(dir string) bool {
- dir = clean(dir)
- if dir == "" {
- return true // needed by (*build.Context).SrcDirs
- }
- return pkgs[dir] != nil
- }
- ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
- dir = clean(dir)
- var fis []os.FileInfo
- if dir == "" {
- // enumerate packages
- for importPath := range pkgs {
- fis = append(fis, fakeDirInfo(importPath))
- }
- } else {
- // enumerate files of package
- for basename := range pkgs[dir] {
- fis = append(fis, fakeFileInfo(basename))
- }
- }
- sort.Sort(byName(fis))
- return fis, nil
- }
- ctxt.OpenFile = func(filename string) (io.ReadCloser, error) {
- filename = clean(filename)
- dir, base := path.Split(filename)
- content, ok := pkgs[path.Clean(dir)][base]
- if !ok {
- return nil, fmt.Errorf("file not found: %s", filename)
- }
- return ioutil.NopCloser(strings.NewReader(content)), nil
- }
- ctxt.IsAbsPath = func(path string) bool {
- path = filepath.ToSlash(path)
- // Don't rely on the default (filepath.Path) since on
- // Windows, it reports virtual paths as non-absolute.
- return strings.HasPrefix(path, "/")
- }
- return &ctxt
-}
-
-type byName []os.FileInfo
-
-func (s byName) Len() int { return len(s) }
-func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
-func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
-
-type fakeFileInfo string
-
-func (fi fakeFileInfo) Name() string { return string(fi) }
-func (fakeFileInfo) Sys() interface{} { return nil }
-func (fakeFileInfo) ModTime() time.Time { return time.Time{} }
-func (fakeFileInfo) IsDir() bool { return false }
-func (fakeFileInfo) Size() int64 { return 0 }
-func (fakeFileInfo) Mode() os.FileMode { return 0644 }
-
-type fakeDirInfo string
-
-func (fd fakeDirInfo) Name() string { return string(fd) }
-func (fakeDirInfo) Sys() interface{} { return nil }
-func (fakeDirInfo) ModTime() time.Time { return time.Time{} }
-func (fakeDirInfo) IsDir() bool { return true }
-func (fakeDirInfo) Size() int64 { return 0 }
-func (fakeDirInfo) Mode() os.FileMode { return 0755 }
diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go
deleted file mode 100644
index 3f71c4fe..00000000
--- a/vendor/golang.org/x/tools/go/buildutil/overlay.go
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package buildutil
-
-import (
- "bufio"
- "bytes"
- "fmt"
- "go/build"
- "io"
- "io/ioutil"
- "path/filepath"
- "strconv"
- "strings"
-)
-
-// OverlayContext overlays a build.Context with additional files from
-// a map. Files in the map take precedence over other files.
-//
-// In addition to plain string comparison, two file names are
-// considered equal if their base names match and their directory
-// components point at the same directory on the file system. That is,
-// symbolic links are followed for directories, but not files.
-//
-// A common use case for OverlayContext is to allow editors to pass in
-// a set of unsaved, modified files.
-//
-// Currently, only the Context.OpenFile function will respect the
-// overlay. This may change in the future.
-func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context {
- // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir
-
- rc := func(data []byte) (io.ReadCloser, error) {
- return ioutil.NopCloser(bytes.NewBuffer(data)), nil
- }
-
- copy := *orig // make a copy
- ctxt := &copy
- ctxt.OpenFile = func(path string) (io.ReadCloser, error) {
- // Fast path: names match exactly.
- if content, ok := overlay[path]; ok {
- return rc(content)
- }
-
- // Slow path: check for same file under a different
- // alias, perhaps due to a symbolic link.
- for filename, content := range overlay {
- if sameFile(path, filename) {
- return rc(content)
- }
- }
-
- return OpenFile(orig, path)
- }
- return ctxt
-}
-
-// ParseOverlayArchive parses an archive containing Go files and their
-// contents. The result is intended to be used with OverlayContext.
-//
-//
-// Archive format
-//
-// The archive consists of a series of files. Each file consists of a
-// name, a decimal file size and the file contents, separated by
-// newlinews. No newline follows after the file contents.
-func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) {
- overlay := make(map[string][]byte)
- r := bufio.NewReader(archive)
- for {
- // Read file name.
- filename, err := r.ReadString('\n')
- if err != nil {
- if err == io.EOF {
- break // OK
- }
- return nil, fmt.Errorf("reading archive file name: %v", err)
- }
- filename = filepath.Clean(strings.TrimSpace(filename))
-
- // Read file size.
- sz, err := r.ReadString('\n')
- if err != nil {
- return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err)
- }
- sz = strings.TrimSpace(sz)
- size, err := strconv.ParseUint(sz, 10, 32)
- if err != nil {
- return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err)
- }
-
- // Read file content.
- content := make([]byte, size)
- if _, err := io.ReadFull(r, content); err != nil {
- return nil, fmt.Errorf("reading archive file %s: %v", filename, err)
- }
- overlay[filename] = content
- }
-
- return overlay, nil
-}
diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go
deleted file mode 100644
index 486606f3..00000000
--- a/vendor/golang.org/x/tools/go/buildutil/tags.go
+++ /dev/null
@@ -1,75 +0,0 @@
-package buildutil
-
-// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go.
-
-import "fmt"
-
-const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " +
- "For more information about build tags, see the description of " +
- "build constraints in the documentation for the go/build package"
-
-// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses
-// a flag value in the same manner as go build's -tags flag and
-// populates a []string slice.
-//
-// See $GOROOT/src/go/build/doc.go for description of build tags.
-// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag.
-//
-// Example:
-// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
-type TagsFlag []string
-
-func (v *TagsFlag) Set(s string) error {
- var err error
- *v, err = splitQuotedFields(s)
- if *v == nil {
- *v = []string{}
- }
- return err
-}
-
-func (v *TagsFlag) Get() interface{} { return *v }
-
-func splitQuotedFields(s string) ([]string, error) {
- // Split fields allowing '' or "" around elements.
- // Quotes further inside the string do not count.
- var f []string
- for len(s) > 0 {
- for len(s) > 0 && isSpaceByte(s[0]) {
- s = s[1:]
- }
- if len(s) == 0 {
- break
- }
- // Accepted quoted string. No unescaping inside.
- if s[0] == '"' || s[0] == '\'' {
- quote := s[0]
- s = s[1:]
- i := 0
- for i < len(s) && s[i] != quote {
- i++
- }
- if i >= len(s) {
- return nil, fmt.Errorf("unterminated %c string", quote)
- }
- f = append(f, s[:i])
- s = s[i+1:]
- continue
- }
- i := 0
- for i < len(s) && !isSpaceByte(s[i]) {
- i++
- }
- f = append(f, s[:i])
- s = s[i:]
- }
- return f, nil
-}
-
-func (v *TagsFlag) String() string {
- return "<tagsFlag>"
-}
-
-func isSpaceByte(c byte) bool {
- return c == ' ' || c == '\t' || c == '\n' || c == '\r'
-}
diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go
deleted file mode 100644
index fc923d7a..00000000
--- a/vendor/golang.org/x/tools/go/buildutil/util.go
+++ /dev/null
@@ -1,212 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package buildutil
-
-import (
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "io"
- "io/ioutil"
- "os"
- "path"
- "path/filepath"
- "strings"
-)
-
-// ParseFile behaves like parser.ParseFile,
-// but uses the build context's file system interface, if any.
-//
-// If file is not absolute (as defined by IsAbsPath), the (dir, file)
-// components are joined using JoinPath; dir must be absolute.
-//
-// The displayPath function, if provided, is used to transform the
-// filename that will be attached to the ASTs.
-//
-// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws.
-//
-func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) {
- if !IsAbsPath(ctxt, file) {
- file = JoinPath(ctxt, dir, file)
- }
- rd, err := OpenFile(ctxt, file)
- if err != nil {
- return nil, err
- }
- defer rd.Close() // ignore error
- if displayPath != nil {
- file = displayPath(file)
- }
- return parser.ParseFile(fset, file, rd, mode)
-}
-
-// ContainingPackage returns the package containing filename.
-//
-// If filename is not absolute, it is interpreted relative to working directory dir.
-// All I/O is via the build context's file system interface, if any.
-//
-// The '...Files []string' fields of the resulting build.Package are not
-// populated (build.FindOnly mode).
-//
-func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) {
- if !IsAbsPath(ctxt, filename) {
- filename = JoinPath(ctxt, dir, filename)
- }
-
- // We must not assume the file tree uses
- // "/" always,
- // `\` always,
- // or os.PathSeparator (which varies by platform),
- // but to make any progress, we are forced to assume that
- // paths will not use `\` unless the PathSeparator
- // is also `\`, thus we can rely on filepath.ToSlash for some sanity.
-
- dirSlash := path.Dir(filepath.ToSlash(filename)) + "/"
-
- // We assume that no source root (GOPATH[i] or GOROOT) contains any other.
- for _, srcdir := range ctxt.SrcDirs() {
- srcdirSlash := filepath.ToSlash(srcdir) + "/"
- if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok {
- return ctxt.Import(importPath, dir, build.FindOnly)
- }
- }
-
- return nil, fmt.Errorf("can't find package containing %s", filename)
-}
-
-// -- Effective methods of file system interface -------------------------
-
-// (go/build.Context defines these as methods, but does not export them.)
-
-// hasSubdir calls ctxt.HasSubdir (if not nil) or else uses
-// the local file system to answer the question.
-func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) {
- if f := ctxt.HasSubdir; f != nil {
- return f(root, dir)
- }
-
- // Try using paths we received.
- if rel, ok = hasSubdir(root, dir); ok {
- return
- }
-
- // Try expanding symlinks and comparing
- // expanded against unexpanded and
- // expanded against expanded.
- rootSym, _ := filepath.EvalSymlinks(root)
- dirSym, _ := filepath.EvalSymlinks(dir)
-
- if rel, ok = hasSubdir(rootSym, dir); ok {
- return
- }
- if rel, ok = hasSubdir(root, dirSym); ok {
- return
- }
- return hasSubdir(rootSym, dirSym)
-}
-
-func hasSubdir(root, dir string) (rel string, ok bool) {
- const sep = string(filepath.Separator)
- root = filepath.Clean(root)
- if !strings.HasSuffix(root, sep) {
- root += sep
- }
-
- dir = filepath.Clean(dir)
- if !strings.HasPrefix(dir, root) {
- return "", false
- }
-
- return filepath.ToSlash(dir[len(root):]), true
-}
-
-// FileExists returns true if the specified file exists,
-// using the build context's file system interface.
-func FileExists(ctxt *build.Context, path string) bool {
- if ctxt.OpenFile != nil {
- r, err := ctxt.OpenFile(path)
- if err != nil {
- return false
- }
- r.Close() // ignore error
- return true
- }
- _, err := os.Stat(path)
- return err == nil
-}
-
-// OpenFile behaves like os.Open,
-// but uses the build context's file system interface, if any.
-func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) {
- if ctxt.OpenFile != nil {
- return ctxt.OpenFile(path)
- }
- return os.Open(path)
-}
-
-// IsAbsPath behaves like filepath.IsAbs,
-// but uses the build context's file system interface, if any.
-func IsAbsPath(ctxt *build.Context, path string) bool {
- if ctxt.IsAbsPath != nil {
- return ctxt.IsAbsPath(path)
- }
- return filepath.IsAbs(path)
-}
-
-// JoinPath behaves like filepath.Join,
-// but uses the build context's file system interface, if any.
-func JoinPath(ctxt *build.Context, path ...string) string {
- if ctxt.JoinPath != nil {
- return ctxt.JoinPath(path...)
- }
- return filepath.Join(path...)
-}
-
-// IsDir behaves like os.Stat plus IsDir,
-// but uses the build context's file system interface, if any.
-func IsDir(ctxt *build.Context, path string) bool {
- if ctxt.IsDir != nil {
- return ctxt.IsDir(path)
- }
- fi, err := os.Stat(path)
- return err == nil && fi.IsDir()
-}
-
-// ReadDir behaves like ioutil.ReadDir,
-// but uses the build context's file system interface, if any.
-func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) {
- if ctxt.ReadDir != nil {
- return ctxt.ReadDir(path)
- }
- return ioutil.ReadDir(path)
-}
-
-// SplitPathList behaves like filepath.SplitList,
-// but uses the build context's file system interface, if any.
-func SplitPathList(ctxt *build.Context, s string) []string {
- if ctxt.SplitPathList != nil {
- return ctxt.SplitPathList(s)
- }
- return filepath.SplitList(s)
-}
-
-// sameFile returns true if x and y have the same basename and denote
-// the same file.
-//
-func sameFile(x, y string) bool {
- if path.Clean(x) == path.Clean(y) {
- return true
- }
- if filepath.Base(x) == filepath.Base(y) { // (optimisation)
- if xi, err := os.Stat(x); err == nil {
- if yi, err := os.Stat(y); err == nil {
- return os.SameFile(xi, yi)
- }
- }
- }
- return false
-}
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
new file mode 100644
index 00000000..4c238d10
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
@@ -0,0 +1,109 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gcexportdata provides functions for locating, reading, and
+// writing export data files containing type information produced by the
+// gc compiler. This package supports go1.7 export data format and all
+// later versions.
+//
+// Although it might seem convenient for this package to live alongside
+// go/types in the standard library, this would cause version skew
+// problems for developer tools that use it, since they must be able to
+// consume the outputs of the gc compiler both before and after a Go
+// update such as from Go 1.7 to Go 1.8. Because this package lives in
+// golang.org/x/tools, sites can update their version of this repo some
+// time before the Go 1.8 release and rebuild and redeploy their
+// developer tools, which will then be able to consume both Go 1.7 and
+// Go 1.8 export data files, so they will work before and after the
+// Go update. (See discussion at https://github.com/golang/go/issues/15651.)
+//
+package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/token"
+ "go/types"
+ "io"
+ "io/ioutil"
+
+ "golang.org/x/tools/go/internal/gcimporter"
+)
+
+// Find returns the name of an object (.o) or archive (.a) file
+// containing type information for the specified import path,
+// using the workspace layout conventions of go/build.
+// If no file was found, an empty filename is returned.
+//
+// A relative srcDir is interpreted relative to the current working directory.
+//
+// Find also returns the package's resolved (canonical) import path,
+// reflecting the effects of srcDir and vendoring on importPath.
+func Find(importPath, srcDir string) (filename, path string) {
+ return gcimporter.FindPkg(importPath, srcDir)
+}
+
+// NewReader returns a reader for the export data section of an object
+// (.o) or archive (.a) file read from r. The new reader may provide
+// additional trailing data beyond the end of the export data.
+func NewReader(r io.Reader) (io.Reader, error) {
+ buf := bufio.NewReader(r)
+ _, err := gcimporter.FindExportData(buf)
+ // If we ever switch to a zip-like archive format with the ToC
+ // at the end, we can return the correct portion of export data,
+ // but for now we must return the entire rest of the file.
+ return buf, err
+}
+
+// Read reads export data from in, decodes it, and returns type
+// information for the package.
+// The package name is specified by path.
+// File position information is added to fset.
+//
+// Read may inspect and add to the imports map to ensure that references
+// within the export data to other packages are consistent. The caller
+// must ensure that imports[path] does not exist, or exists but is
+// incomplete (see types.Package.Complete), and Read inserts the
+// resulting package into this map entry.
+//
+// On return, the state of the reader is undefined.
+func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
+ data, err := ioutil.ReadAll(in)
+ if err != nil {
+ return nil, fmt.Errorf("reading export data for %q: %v", path, err)
+ }
+
+ if bytes.HasPrefix(data, []byte("!<arch>")) {
+ return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
+ }
+
+ // The App Engine Go runtime v1.6 uses the old export data format.
+ // TODO(adonovan): delete once v1.7 has been around for a while.
+ if bytes.HasPrefix(data, []byte("package ")) {
+ return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
+ }
+
+ // The indexed export format starts with an 'i'; the older
+ // binary export format starts with a 'c', 'd', or 'v'
+ // (from "version"). Select appropriate importer.
+ if len(data) > 0 && data[0] == 'i' {
+ _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
+ return pkg, err
+ }
+
+ _, pkg, err := gcimporter.BImportData(fset, imports, data, path)
+ return pkg, err
+}
+
+// Write writes encoded type information for the specified package to out.
+// The FileSet provides file position information for named objects.
+func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
+ b, err := gcimporter.BExportData(fset, pkg)
+ if err != nil {
+ return err
+ }
+ _, err = out.Write(b)
+ return err
+}
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
new file mode 100644
index 00000000..efe221e7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
@@ -0,0 +1,73 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gcexportdata
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "os"
+)
+
+// NewImporter returns a new instance of the types.Importer interface
+// that reads type information from export data files written by gc.
+// The Importer also satisfies types.ImporterFrom.
+//
+// Export data files are located using "go build" workspace conventions
+// and the build.Default context.
+//
+// Use this importer instead of go/importer.For("gc", ...) to avoid the
+// version-skew problems described in the documentation of this package,
+// or to control the FileSet or access the imports map populated during
+// package loading.
+//
+func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
+ return importer{fset, imports}
+}
+
+type importer struct {
+ fset *token.FileSet
+ imports map[string]*types.Package
+}
+
+func (imp importer) Import(importPath string) (*types.Package, error) {
+ return imp.ImportFrom(importPath, "", 0)
+}
+
+func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
+ filename, path := Find(importPath, srcDir)
+ if filename == "" {
+ if importPath == "unsafe" {
+ // Even for unsafe, call Find first in case
+ // the package was vendored.
+ return types.Unsafe, nil
+ }
+ return nil, fmt.Errorf("can't find import: %s", importPath)
+ }
+
+ if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
+ return pkg, nil // cache hit
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ f.Close()
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("reading export data: %s: %v", filename, err)
+ }
+ }()
+
+ r, err := NewReader(f)
+ if err != nil {
+ return nil, err
+ }
+
+ return Read(r, imp.fset, imp.imports, path)
+}
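
A sketch of plugging this importer into go/types type-checking; the source snippet is illustrative, and whether export data for "fmt" can actually be located depends on the local toolchain and workspace layout.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	fset := token.NewFileSet()
	const src = `package demo

import "fmt"

var _ = fmt.Sprintf`
	file, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}

	// Resolve the "fmt" import from gc export data rather than from source.
	imports := make(map[string]*types.Package)
	conf := types.Config{Importer: gcexportdata.NewImporter(fset, imports)}
	pkg, err := conf.Check("demo", fset, []*ast.File{file}, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("type-checked", pkg.Path())
}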
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/main.go b/vendor/golang.org/x/tools/go/gcexportdata/main.go
new file mode 100644
index 00000000..2713dce6
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/gcexportdata/main.go
@@ -0,0 +1,99 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+// The gcexportdata command is a diagnostic tool that displays the
+// contents of gc export data files.
+package main
+
+import (
+ "flag"
+ "fmt"
+ "go/token"
+ "go/types"
+ "log"
+ "os"
+
+ "golang.org/x/tools/go/gcexportdata"
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+var packageFlag = flag.String("package", "", "alternative package to print")
+
+func main() {
+ log.SetPrefix("gcexportdata: ")
+ log.SetFlags(0)
+ flag.Usage = func() {
+ fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a")
+ }
+ flag.Parse()
+ if flag.NArg() != 1 {
+ flag.Usage()
+ os.Exit(2)
+ }
+ filename := flag.Args()[0]
+
+ f, err := os.Open(filename)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ r, err := gcexportdata.NewReader(f)
+ if err != nil {
+ log.Fatalf("%s: %s", filename, err)
+ }
+
+ // Decode the package.
+ const primary = "<primary>"
+ imports := make(map[string]*types.Package)
+ fset := token.NewFileSet()
+ pkg, err := gcexportdata.Read(r, fset, imports, primary)
+ if err != nil {
+ log.Fatalf("%s: %s", filename, err)
+ }
+
+ // Optionally select an indirectly mentioned package.
+ if *packageFlag != "" {
+ pkg = imports[*packageFlag]
+ if pkg == nil {
+ fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n",
+ filename, *packageFlag)
+ for p := range imports {
+ if p != primary {
+ fmt.Fprintf(os.Stderr, "\t%s\n", p)
+ }
+ }
+ os.Exit(1)
+ }
+ }
+
+ // Print all package-level declarations, including non-exported ones.
+ fmt.Printf("package %s\n", pkg.Name())
+ for _, imp := range pkg.Imports() {
+ fmt.Printf("import %q\n", imp.Path())
+ }
+ qual := func(p *types.Package) string {
+ if pkg == p {
+ return ""
+ }
+ return p.Name()
+ }
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ obj := scope.Lookup(name)
+ fmt.Printf("%s: %s\n",
+ fset.Position(obj.Pos()),
+ types.ObjectString(obj, qual))
+
+ // For types, print each method.
+ if _, ok := obj.(*types.TypeName); ok {
+ for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
+ fmt.Printf("%s: %s\n",
+ fset.Position(method.Obj().Pos()),
+ types.SelectionString(method, qual))
+ }
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
new file mode 100644
index 00000000..6a9821ae
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
@@ -0,0 +1,852 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Binary package export.
+// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
+// see that file for specification of the format.
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "math"
+ "math/big"
+ "sort"
+ "strings"
+)
+
+// If debugFormat is set, each integer and string value is preceded by a marker
+// and position information in the encoding. This mechanism permits an importer
+// to recognize immediately when it is out of sync. The importer recognizes this
+// mode automatically (i.e., it can import export data produced with debugging
+// support even if debugFormat is not set at the time of import). This mode will
+// lead to massively larger export data (by a factor of 2 to 3) and should only
+// be enabled during development and debugging.
+//
+// NOTE: This flag is the first flag to enable if importing dies because of
+// (suspected) format errors, and whenever a change is made to the format.
+const debugFormat = false // default: false
+
+// If trace is set, debugging output is printed to std out.
+const trace = false // default: false
+
+// Current export format version. Increase with each format change.
+// Note: The latest binary (non-indexed) export format is at version 6.
+// This exporter is still at level 4, but it doesn't matter since
+// the binary importer can handle older versions just fine.
+// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
+// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
+// 4: type name objects support type aliases, uses aliasTag
+// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
+// 2: removed unused bool in ODCL export (compiler only)
+// 1: header format change (more regular), export package for _ struct fields
+// 0: Go1.7 encoding
+const exportVersion = 4
+
+// trackAllTypes enables cycle tracking for all types, not just named
+// types. The existing compiler invariants assume that unnamed types
+// that are not completely set up are not used, or else there are spurious
+// errors.
+// If disabled, only named types are tracked, possibly leading to slightly
+// less efficient encoding in rare cases. It also prevents the export of
+// some corner-case type declarations (but those are not handled correctly
+// with the textual export format either).
+// TODO(gri) enable and remove once issues caused by it are fixed
+const trackAllTypes = false
+
+type exporter struct {
+ fset *token.FileSet
+ out bytes.Buffer
+
+ // object -> index maps, indexed in order of serialization
+ strIndex map[string]int
+ pkgIndex map[*types.Package]int
+ typIndex map[types.Type]int
+
+ // position encoding
+ posInfoFormat bool
+ prevFile string
+ prevLine int
+
+ // debugging support
+ written int // bytes written
+ indent int // for trace
+}
+
+// internalError represents an error generated inside this package.
+type internalError string
+
+func (e internalError) Error() string { return "gcimporter: " + string(e) }
+
+func internalErrorf(format string, args ...interface{}) error {
+ return internalError(fmt.Sprintf(format, args...))
+}
+
+// BExportData returns binary export data for pkg.
+// If no file set is provided, position info will be missing.
+func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if ierr, ok := e.(internalError); ok {
+ err = ierr
+ return
+ }
+ // Not an internal error; panic again.
+ panic(e)
+ }
+ }()
+
+ p := exporter{
+ fset: fset,
+ strIndex: map[string]int{"": 0}, // empty string is mapped to 0
+ pkgIndex: make(map[*types.Package]int),
+ typIndex: make(map[types.Type]int),
+ posInfoFormat: true, // TODO(gri) might become a flag, eventually
+ }
+
+ // write version info
+ // The version string must start with "version %d" where %d is the version
+ // number. Additional debugging information may follow after a blank; that
+ // text is ignored by the importer.
+ p.rawStringln(fmt.Sprintf("version %d", exportVersion))
+ var debug string
+ if debugFormat {
+ debug = "debug"
+ }
+ p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
+ p.bool(trackAllTypes)
+ p.bool(p.posInfoFormat)
+
+ // --- generic export data ---
+
+ // populate type map with predeclared "known" types
+ for index, typ := range predeclared {
+ p.typIndex[typ] = index
+ }
+ if len(p.typIndex) != len(predeclared) {
+ return nil, internalError("duplicate entries in type map?")
+ }
+
+ // write package data
+ p.pkg(pkg, true)
+ if trace {
+ p.tracef("\n")
+ }
+
+ // write objects
+ objcount := 0
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ if !ast.IsExported(name) {
+ continue
+ }
+ if trace {
+ p.tracef("\n")
+ }
+ p.obj(scope.Lookup(name))
+ objcount++
+ }
+
+ // indicate end of list
+ if trace {
+ p.tracef("\n")
+ }
+ p.tag(endTag)
+
+ // for self-verification only (redundant)
+ p.int(objcount)
+
+ if trace {
+ p.tracef("\n")
+ }
+
+ // --- end of export data ---
+
+ return p.out.Bytes(), nil
+}
+
+func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
+ if pkg == nil {
+ panic(internalError("unexpected nil pkg"))
+ }
+
+ // if we saw the package before, write its index (>= 0)
+ if i, ok := p.pkgIndex[pkg]; ok {
+ p.index('P', i)
+ return
+ }
+
+ // otherwise, remember the package, write the package tag (< 0) and package data
+ if trace {
+ p.tracef("P%d = { ", len(p.pkgIndex))
+ defer p.tracef("} ")
+ }
+ p.pkgIndex[pkg] = len(p.pkgIndex)
+
+ p.tag(packageTag)
+ p.string(pkg.Name())
+ if emptypath {
+ p.string("")
+ } else {
+ p.string(pkg.Path())
+ }
+}
+
+func (p *exporter) obj(obj types.Object) {
+ switch obj := obj.(type) {
+ case *types.Const:
+ p.tag(constTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ p.typ(obj.Type())
+ p.value(obj.Val())
+
+ case *types.TypeName:
+ if isAlias(obj) {
+ p.tag(aliasTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ } else {
+ p.tag(typeTag)
+ }
+ p.typ(obj.Type())
+
+ case *types.Var:
+ p.tag(varTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ p.typ(obj.Type())
+
+ case *types.Func:
+ p.tag(funcTag)
+ p.pos(obj)
+ p.qualifiedName(obj)
+ sig := obj.Type().(*types.Signature)
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+
+ default:
+ panic(internalErrorf("unexpected object %v (%T)", obj, obj))
+ }
+}
+
+func (p *exporter) pos(obj types.Object) {
+ if !p.posInfoFormat {
+ return
+ }
+
+ file, line := p.fileLine(obj)
+ if file == p.prevFile {
+ // common case: write line delta
+ // delta == 0 means different file or no line change
+ delta := line - p.prevLine
+ p.int(delta)
+ if delta == 0 {
+ p.int(-1) // -1 means no file change
+ }
+ } else {
+ // different file
+ p.int(0)
+ // Encode filename as length of common prefix with previous
+ // filename, followed by (possibly empty) suffix. Filenames
+ // frequently share path prefixes, so this can save a lot
+ // of space and make export data size less dependent on file
+ // path length. The suffix is unlikely to be empty because
+ // file names tend to end in ".go".
+ n := commonPrefixLen(p.prevFile, file)
+ p.int(n) // n >= 0
+ p.string(file[n:]) // write suffix only
+ p.prevFile = file
+ p.int(line)
+ }
+ p.prevLine = line
+}
+
+func (p *exporter) fileLine(obj types.Object) (file string, line int) {
+ if p.fset != nil {
+ pos := p.fset.Position(obj.Pos())
+ file = pos.Filename
+ line = pos.Line
+ }
+ return
+}
+
+func commonPrefixLen(a, b string) int {
+ if len(a) > len(b) {
+ a, b = b, a
+ }
+ // len(a) <= len(b)
+ i := 0
+ for i < len(a) && a[i] == b[i] {
+ i++
+ }
+ return i
+}
+
+func (p *exporter) qualifiedName(obj types.Object) {
+ p.string(obj.Name())
+ p.pkg(obj.Pkg(), false)
+}
+
+func (p *exporter) typ(t types.Type) {
+ if t == nil {
+ panic(internalError("nil type"))
+ }
+
+ // Possible optimization: Anonymous pointer types *T where
+ // T is a named type are common. We could canonicalize all
+ // such types *T to a single type PT = *T. This would lead
+ // to at most one *T entry in typIndex, and all future *T's
+ // would be encoded as the respective index directly. Would
+ // save 1 byte (pointerTag) per *T and reduce the typIndex
+ // size (at the cost of a canonicalization map). We can do
+ // this later, without encoding format change.
+
+ // if we saw the type before, write its index (>= 0)
+ if i, ok := p.typIndex[t]; ok {
+ p.index('T', i)
+ return
+ }
+
+ // otherwise, remember the type, write the type tag (< 0) and type data
+ if trackAllTypes {
+ if trace {
+ p.tracef("T%d = {>\n", len(p.typIndex))
+ defer p.tracef("<\n} ")
+ }
+ p.typIndex[t] = len(p.typIndex)
+ }
+
+ switch t := t.(type) {
+ case *types.Named:
+ if !trackAllTypes {
+ // if we don't track all types, track named types now
+ p.typIndex[t] = len(p.typIndex)
+ }
+
+ p.tag(namedTag)
+ p.pos(t.Obj())
+ p.qualifiedName(t.Obj())
+ p.typ(t.Underlying())
+ if !types.IsInterface(t) {
+ p.assocMethods(t)
+ }
+
+ case *types.Array:
+ p.tag(arrayTag)
+ p.int64(t.Len())
+ p.typ(t.Elem())
+
+ case *types.Slice:
+ p.tag(sliceTag)
+ p.typ(t.Elem())
+
+ case *dddSlice:
+ p.tag(dddTag)
+ p.typ(t.elem)
+
+ case *types.Struct:
+ p.tag(structTag)
+ p.fieldList(t)
+
+ case *types.Pointer:
+ p.tag(pointerTag)
+ p.typ(t.Elem())
+
+ case *types.Signature:
+ p.tag(signatureTag)
+ p.paramList(t.Params(), t.Variadic())
+ p.paramList(t.Results(), false)
+
+ case *types.Interface:
+ p.tag(interfaceTag)
+ p.iface(t)
+
+ case *types.Map:
+ p.tag(mapTag)
+ p.typ(t.Key())
+ p.typ(t.Elem())
+
+ case *types.Chan:
+ p.tag(chanTag)
+ p.int(int(3 - t.Dir())) // hack
+ p.typ(t.Elem())
+
+ default:
+ panic(internalErrorf("unexpected type %T: %s", t, t))
+ }
+}
+
+func (p *exporter) assocMethods(named *types.Named) {
+ // Sort methods (for determinism).
+ var methods []*types.Func
+ for i := 0; i < named.NumMethods(); i++ {
+ methods = append(methods, named.Method(i))
+ }
+ sort.Sort(methodsByName(methods))
+
+ p.int(len(methods))
+
+ if trace && methods != nil {
+ p.tracef("associated methods {>\n")
+ }
+
+ for i, m := range methods {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+
+ p.pos(m)
+ name := m.Name()
+ p.string(name)
+ if !exported(name) {
+ p.pkg(m.Pkg(), false)
+ }
+
+ sig := m.Type().(*types.Signature)
+ p.paramList(types.NewTuple(sig.Recv()), false)
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+ p.int(0) // dummy value for go:nointerface pragma - ignored by importer
+ }
+
+ if trace && methods != nil {
+ p.tracef("<\n} ")
+ }
+}
+
+type methodsByName []*types.Func
+
+func (x methodsByName) Len() int { return len(x) }
+func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
+
+func (p *exporter) fieldList(t *types.Struct) {
+ if trace && t.NumFields() > 0 {
+ p.tracef("fields {>\n")
+ defer p.tracef("<\n} ")
+ }
+
+ p.int(t.NumFields())
+ for i := 0; i < t.NumFields(); i++ {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+ p.field(t.Field(i))
+ p.string(t.Tag(i))
+ }
+}
+
+func (p *exporter) field(f *types.Var) {
+ if !f.IsField() {
+ panic(internalError("field expected"))
+ }
+
+ p.pos(f)
+ p.fieldName(f)
+ p.typ(f.Type())
+}
+
+func (p *exporter) iface(t *types.Interface) {
+ // TODO(gri): enable importer to load embedded interfaces,
+ // then emit Embeddeds and ExplicitMethods separately here.
+ p.int(0)
+
+ n := t.NumMethods()
+ if trace && n > 0 {
+ p.tracef("methods {>\n")
+ defer p.tracef("<\n} ")
+ }
+ p.int(n)
+ for i := 0; i < n; i++ {
+ if trace && i > 0 {
+ p.tracef("\n")
+ }
+ p.method(t.Method(i))
+ }
+}
+
+func (p *exporter) method(m *types.Func) {
+ sig := m.Type().(*types.Signature)
+ if sig.Recv() == nil {
+ panic(internalError("method expected"))
+ }
+
+ p.pos(m)
+ p.string(m.Name())
+ if m.Name() != "_" && !ast.IsExported(m.Name()) {
+ p.pkg(m.Pkg(), false)
+ }
+
+ // interface method; no need to encode receiver.
+ p.paramList(sig.Params(), sig.Variadic())
+ p.paramList(sig.Results(), false)
+}
+
+func (p *exporter) fieldName(f *types.Var) {
+ name := f.Name()
+
+ if f.Anonymous() {
+ // anonymous field - we distinguish between 3 cases:
+ // 1) field name matches base type name and is exported
+ // 2) field name matches base type name and is not exported
+ // 3) field name doesn't match base type name (alias name)
+ bname := basetypeName(f.Type())
+ if name == bname {
+ if ast.IsExported(name) {
+ name = "" // 1) we don't need to know the field name or package
+ } else {
+ name = "?" // 2) use unexported name "?" to force package export
+ }
+ } else {
+ // 3) indicate alias and export name as is
+ // (this requires an extra "@" but this is a rare case)
+ p.string("@")
+ }
+ }
+
+ p.string(name)
+ if name != "" && !ast.IsExported(name) {
+ p.pkg(f.Pkg(), false)
+ }
+}
+
+func basetypeName(typ types.Type) string {
+ switch typ := deref(typ).(type) {
+ case *types.Basic:
+ return typ.Name()
+ case *types.Named:
+ return typ.Obj().Name()
+ default:
+ return "" // unnamed type
+ }
+}
+
+func (p *exporter) paramList(params *types.Tuple, variadic bool) {
+ // use negative length to indicate unnamed parameters
+ // (look at the first parameter only since either all
+ // names are present or all are absent)
+ n := params.Len()
+ if n > 0 && params.At(0).Name() == "" {
+ n = -n
+ }
+ p.int(n)
+ for i := 0; i < params.Len(); i++ {
+ q := params.At(i)
+ t := q.Type()
+ if variadic && i == params.Len()-1 {
+ t = &dddSlice{t.(*types.Slice).Elem()}
+ }
+ p.typ(t)
+ if n > 0 {
+ name := q.Name()
+ p.string(name)
+ if name != "_" {
+ p.pkg(q.Pkg(), false)
+ }
+ }
+ p.string("") // no compiler-specific info
+ }
+}
+
+func (p *exporter) value(x constant.Value) {
+ if trace {
+ p.tracef("= ")
+ }
+
+ switch x.Kind() {
+ case constant.Bool:
+ tag := falseTag
+ if constant.BoolVal(x) {
+ tag = trueTag
+ }
+ p.tag(tag)
+
+ case constant.Int:
+ if v, exact := constant.Int64Val(x); exact {
+ // common case: x fits into an int64 - use compact encoding
+ p.tag(int64Tag)
+ p.int64(v)
+ return
+ }
+ // uncommon case: large x - use float encoding
+ // (powers of 2 will be encoded efficiently with exponent)
+ p.tag(floatTag)
+ p.float(constant.ToFloat(x))
+
+ case constant.Float:
+ p.tag(floatTag)
+ p.float(x)
+
+ case constant.Complex:
+ p.tag(complexTag)
+ p.float(constant.Real(x))
+ p.float(constant.Imag(x))
+
+ case constant.String:
+ p.tag(stringTag)
+ p.string(constant.StringVal(x))
+
+ case constant.Unknown:
+ // package contains type errors
+ p.tag(unknownTag)
+
+ default:
+ panic(internalErrorf("unexpected value %v (%T)", x, x))
+ }
+}
+
+func (p *exporter) float(x constant.Value) {
+ if x.Kind() != constant.Float {
+ panic(internalErrorf("unexpected constant %v, want float", x))
+ }
+ // extract sign (there is no -0)
+ sign := constant.Sign(x)
+ if sign == 0 {
+ // x == 0
+ p.int(0)
+ return
+ }
+ // x != 0
+
+ var f big.Float
+ if v, exact := constant.Float64Val(x); exact {
+ // float64
+ f.SetFloat64(v)
+ } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
+ // TODO(gri): add big.Rat accessor to constant.Value.
+ r := valueToRat(num)
+ f.SetRat(r.Quo(r, valueToRat(denom)))
+ } else {
+ // Value too large to represent as a fraction => inaccessible.
+ // TODO(gri): add big.Float accessor to constant.Value.
+ f.SetFloat64(math.MaxFloat64) // FIXME
+ }
+
+ // extract exponent such that 0.5 <= m < 1.0
+ var m big.Float
+ exp := f.MantExp(&m)
+
+ // extract mantissa as *big.Int
+ // - set exponent large enough so mant satisfies mant.IsInt()
+ // - get *big.Int from mant
+ m.SetMantExp(&m, int(m.MinPrec()))
+ mant, acc := m.Int(nil)
+ if acc != big.Exact {
+ panic(internalError("internal error"))
+ }
+
+ p.int(sign)
+ p.int(exp)
+ p.string(string(mant.Bytes()))
+}
+
+func valueToRat(x constant.Value) *big.Rat {
+ // Convert little-endian to big-endian.
+ // I can't believe this is necessary.
+ bytes := constant.Bytes(x)
+ for i := 0; i < len(bytes)/2; i++ {
+ bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
+ }
+ return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
+}
+
+func (p *exporter) bool(b bool) bool {
+ if trace {
+ p.tracef("[")
+ defer p.tracef("= %v] ", b)
+ }
+
+ x := 0
+ if b {
+ x = 1
+ }
+ p.int(x)
+ return b
+}
+
+// ----------------------------------------------------------------------------
+// Low-level encoders
+
+func (p *exporter) index(marker byte, index int) {
+ if index < 0 {
+ panic(internalError("invalid index < 0"))
+ }
+ if debugFormat {
+ p.marker('t')
+ }
+ if trace {
+ p.tracef("%c%d ", marker, index)
+ }
+ p.rawInt64(int64(index))
+}
+
+func (p *exporter) tag(tag int) {
+ if tag >= 0 {
+ panic(internalError("invalid tag >= 0"))
+ }
+ if debugFormat {
+ p.marker('t')
+ }
+ if trace {
+ p.tracef("%s ", tagString[-tag])
+ }
+ p.rawInt64(int64(tag))
+}
+
+func (p *exporter) int(x int) {
+ p.int64(int64(x))
+}
+
+func (p *exporter) int64(x int64) {
+ if debugFormat {
+ p.marker('i')
+ }
+ if trace {
+ p.tracef("%d ", x)
+ }
+ p.rawInt64(x)
+}
+
+func (p *exporter) string(s string) {
+ if debugFormat {
+ p.marker('s')
+ }
+ if trace {
+ p.tracef("%q ", s)
+ }
+ // if we saw the string before, write its index (>= 0)
+ // (the empty string is mapped to 0)
+ if i, ok := p.strIndex[s]; ok {
+ p.rawInt64(int64(i))
+ return
+ }
+ // otherwise, remember string and write its negative length and bytes
+ p.strIndex[s] = len(p.strIndex)
+ p.rawInt64(-int64(len(s)))
+ for i := 0; i < len(s); i++ {
+ p.rawByte(s[i])
+ }
+}
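+
+// Illustration (editorial note, not part of the upstream file): writing the
+// strings "foo", "bar", "foo" emits -3 'f' 'o' 'o', then -3 'b' 'a' 'r', and
+// finally just the varint index assigned to the first "foo", so repeated
+// identifiers cost a single varint after their first occurrence.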
+
+// marker emits a marker byte and position information which makes
+// it easy for a reader to detect if it is "out of sync". Used in
+// debugFormat mode only.
+func (p *exporter) marker(m byte) {
+ p.rawByte(m)
+ // Enable this for help tracking down the location
+ // of an incorrect marker when running in debugFormat.
+ if false && trace {
+ p.tracef("#%d ", p.written)
+ }
+ p.rawInt64(int64(p.written))
+}
+
+// rawInt64 should only be used by low-level encoders.
+func (p *exporter) rawInt64(x int64) {
+ var tmp [binary.MaxVarintLen64]byte
+ n := binary.PutVarint(tmp[:], x)
+ for i := 0; i < n; i++ {
+ p.rawByte(tmp[i])
+ }
+}
+
+// rawStringln should only be used to emit the initial version string.
+func (p *exporter) rawStringln(s string) {
+ for i := 0; i < len(s); i++ {
+ p.rawByte(s[i])
+ }
+ p.rawByte('\n')
+}
+
+// rawByte is the bottleneck interface to write to p.out.
+// rawByte escapes b as follows (any encoding that
+// hides '$' would do):
+//
+// '$' => '|' 'S'
+// '|' => '|' '|'
+//
+// Necessary so other tools can find the end of the
+// export data by searching for "$$".
+// rawByte should only be used by low-level encoders.
+func (p *exporter) rawByte(b byte) {
+ switch b {
+ case '$':
+ // write '$' as '|' 'S'
+ b = 'S'
+ fallthrough
+ case '|':
+ // write '|' as '|' '|'
+ p.out.WriteByte('|')
+ p.written++
+ }
+ p.out.WriteByte(b)
+ p.written++
+}
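+
+// Illustration (editorial note, not part of the upstream file): the byte
+// sequence 'a' '$' 'b' '|' 'c' is written as "a|Sb||c", so a literal '$'
+// never appears in the stream and the "$$" terminator other tools search
+// for stays unambiguous. rawByte in bimport.go applies the inverse mapping.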
+
+// tracef is like fmt.Printf but it rewrites the format string
+// to take care of indentation.
+func (p *exporter) tracef(format string, args ...interface{}) {
+ if strings.ContainsAny(format, "<>\n") {
+ var buf bytes.Buffer
+ for i := 0; i < len(format); i++ {
+ // no need to deal with runes
+ ch := format[i]
+ switch ch {
+ case '>':
+ p.indent++
+ continue
+ case '<':
+ p.indent--
+ continue
+ }
+ buf.WriteByte(ch)
+ if ch == '\n' {
+ for j := p.indent; j > 0; j-- {
+ buf.WriteString(". ")
+ }
+ }
+ }
+ format = buf.String()
+ }
+ fmt.Printf(format, args...)
+}
+
+// Debugging support.
+// (tagString is only used when tracing is enabled)
+var tagString = [...]string{
+ // Packages
+ -packageTag: "package",
+
+ // Types
+ -namedTag: "named type",
+ -arrayTag: "array",
+ -sliceTag: "slice",
+ -dddTag: "ddd",
+ -structTag: "struct",
+ -pointerTag: "pointer",
+ -signatureTag: "signature",
+ -interfaceTag: "interface",
+ -mapTag: "map",
+ -chanTag: "chan",
+
+ // Values
+ -falseTag: "false",
+ -trueTag: "true",
+ -int64Tag: "int64",
+ -floatTag: "float",
+ -fractionTag: "fraction",
+ -complexTag: "complex",
+ -stringTag: "string",
+ -unknownTag: "unknown",
+
+ // Type aliases
+ -aliasTag: "alias",
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
new file mode 100644
index 00000000..b31eacfc
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
@@ -0,0 +1,1028 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go.
+
+package gcimporter
+
+import (
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+)
+
+type importer struct {
+ imports map[string]*types.Package
+ data []byte
+ importpath string
+ buf []byte // for reading strings
+ version int // export format version
+
+ // object lists
+ strList []string // in order of appearance
+ pathList []string // in order of appearance
+ pkgList []*types.Package // in order of appearance
+ typList []types.Type // in order of appearance
+ interfaceList []*types.Interface // for delayed completion only
+ trackAllTypes bool
+
+ // position encoding
+ posInfoFormat bool
+ prevFile string
+ prevLine int
+ fake fakeFileSet
+
+ // debugging support
+ debugFormat bool
+ read int // bytes read
+}
+
+// BImportData imports a package from the serialized package data
+// and returns the number of bytes consumed and a reference to the package.
+// If the export data version is not recognized or the format is otherwise
+// compromised, an error is returned.
+func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ // catch panics and return them as errors
+ const currentVersion = 6
+ version := -1 // unknown version
+ defer func() {
+ if e := recover(); e != nil {
+ // Return a (possibly nil or incomplete) package unchanged (see #16088).
+ if version > currentVersion {
+ err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
+ } else {
+ err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+ }
+ }
+ }()
+
+ p := importer{
+ imports: imports,
+ data: data,
+ importpath: path,
+ version: version,
+ strList: []string{""}, // empty string is mapped to 0
+ pathList: []string{""}, // empty string is mapped to 0
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*token.File),
+ },
+ }
+
+ // read version info
+ var versionstr string
+ if b := p.rawByte(); b == 'c' || b == 'd' {
+ // Go1.7 encoding; first byte encodes low-level
+ // encoding format (compact vs debug).
+ // For backward-compatibility only (avoid problems with
+ // old installed packages). Newly compiled packages use
+ // the extensible format string.
+ // TODO(gri) Remove this support eventually; after Go1.8.
+ if b == 'd' {
+ p.debugFormat = true
+ }
+ p.trackAllTypes = p.rawByte() == 'a'
+ p.posInfoFormat = p.int() != 0
+ versionstr = p.string()
+ if versionstr == "v1" {
+ version = 0
+ }
+ } else {
+ // Go1.8 extensible encoding
+ // read version string and extract version number (ignore anything after the version number)
+ versionstr = p.rawStringln(b)
+ if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" {
+ if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
+ version = v
+ }
+ }
+ }
+ p.version = version
+
+ // read version specific flags - extend as necessary
+ switch p.version {
+ // case currentVersion:
+ // ...
+ // fallthrough
+ case currentVersion, 5, 4, 3, 2, 1:
+ p.debugFormat = p.rawStringln(p.rawByte()) == "debug"
+ p.trackAllTypes = p.int() != 0
+ p.posInfoFormat = p.int() != 0
+ case 0:
+ // Go1.7 encoding format - nothing to do here
+ default:
+ errorf("unknown bexport format version %d (%q)", p.version, versionstr)
+ }
+
+ // --- generic export data ---
+
+ // populate typList with predeclared "known" types
+ p.typList = append(p.typList, predeclared...)
+
+ // read package data
+ pkg = p.pkg()
+
+ // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go)
+ objcount := 0
+ for {
+ tag := p.tagOrIndex()
+ if tag == endTag {
+ break
+ }
+ p.obj(tag)
+ objcount++
+ }
+
+ // self-verification
+ if count := p.int(); count != objcount {
+ errorf("got %d objects; want %d", objcount, count)
+ }
+
+ // ignore compiler-specific import data
+
+ // complete interfaces
+ // TODO(gri) re-investigate if we still need to do this in a delayed fashion
+ for _, typ := range p.interfaceList {
+ typ.Complete()
+ }
+
+ // record all referenced packages as imports
+ list := append(([]*types.Package)(nil), p.pkgList[1:]...)
+ sort.Sort(byPath(list))
+ pkg.SetImports(list)
+
+ // package was imported completely and without errors
+ pkg.MarkComplete()
+
+ return p.read, pkg, nil
+}
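+
+// A minimal usage sketch (editorial note, not part of the upstream file;
+// exportBytes is hypothetical and assumed to hold the binary export data
+// that follows the "$$B\n" header of an object file):
+//
+//	fset := token.NewFileSet()
+//	imports := make(map[string]*types.Package)
+//	_, pkg, err := BImportData(fset, imports, exportBytes, "example.com/foo")
+//	if err != nil {
+//		// unreadable export data or newer, unsupported format version
+//	}
+//	_ = pkg // fully loaded, complete *types.Package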
+
+func errorf(format string, args ...interface{}) {
+ panic(fmt.Sprintf(format, args...))
+}
+
+func (p *importer) pkg() *types.Package {
+ // if the package was seen before, i is its index (>= 0)
+ i := p.tagOrIndex()
+ if i >= 0 {
+ return p.pkgList[i]
+ }
+
+ // otherwise, i is the package tag (< 0)
+ if i != packageTag {
+ errorf("unexpected package tag %d version %d", i, p.version)
+ }
+
+ // read package data
+ name := p.string()
+ var path string
+ if p.version >= 5 {
+ path = p.path()
+ } else {
+ path = p.string()
+ }
+ if p.version >= 6 {
+ p.int() // package height; unused by go/types
+ }
+
+ // we should never see an empty package name
+ if name == "" {
+ errorf("empty package name in import")
+ }
+
+ // an empty path denotes the package we are currently importing;
+ // it must be the first package we see
+ if (path == "") != (len(p.pkgList) == 0) {
+ errorf("package path %q for pkg index %d", path, len(p.pkgList))
+ }
+
+ // if the package was imported before, use that one; otherwise create a new one
+ if path == "" {
+ path = p.importpath
+ }
+ pkg := p.imports[path]
+ if pkg == nil {
+ pkg = types.NewPackage(path, name)
+ p.imports[path] = pkg
+ } else if pkg.Name() != name {
+ errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path)
+ }
+ p.pkgList = append(p.pkgList, pkg)
+
+ return pkg
+}
+
+// objTag returns the tag value for each object kind.
+func objTag(obj types.Object) int {
+ switch obj.(type) {
+ case *types.Const:
+ return constTag
+ case *types.TypeName:
+ return typeTag
+ case *types.Var:
+ return varTag
+ case *types.Func:
+ return funcTag
+ default:
+ errorf("unexpected object: %v (%T)", obj, obj) // panics
+ panic("unreachable")
+ }
+}
+
+func sameObj(a, b types.Object) bool {
+ // Because unnamed types are not canonicalized, we cannot simply compare types for
+ // (pointer) identity.
+ // Ideally we'd check equality of constant values as well, but this is good enough.
+ return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type())
+}
+
+func (p *importer) declare(obj types.Object) {
+ pkg := obj.Pkg()
+ if alt := pkg.Scope().Insert(obj); alt != nil {
+ // This can only trigger if we import a (non-type) object a second time.
+		// Excluding type aliases, this cannot happen because a) we only import a package
+ // once; and b) we ignore compiler-specific export data which may contain
+ // functions whose inlined function bodies refer to other functions that
+ // were already imported.
+ // However, type aliases require reexporting the original type, so we need
+ // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go,
+ // method importer.obj, switch case importing functions).
+ // TODO(gri) review/update this comment once the gc compiler handles type aliases.
+ if !sameObj(obj, alt) {
+ errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt)
+ }
+ }
+}
+
+func (p *importer) obj(tag int) {
+ switch tag {
+ case constTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ val := p.value()
+ p.declare(types.NewConst(pos, pkg, name, typ, val))
+
+ case aliasTag:
+ // TODO(gri) verify type alias hookup is correct
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ p.declare(types.NewTypeName(pos, pkg, name, typ))
+
+ case typeTag:
+ p.typ(nil, nil)
+
+ case varTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ typ := p.typ(nil, nil)
+ p.declare(types.NewVar(pos, pkg, name, typ))
+
+ case funcTag:
+ pos := p.pos()
+ pkg, name := p.qualifiedName()
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ sig := types.NewSignature(nil, params, result, isddd)
+ p.declare(types.NewFunc(pos, pkg, name, sig))
+
+ default:
+ errorf("unexpected object tag %d", tag)
+ }
+}
+
+const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
+
+func (p *importer) pos() token.Pos {
+ if !p.posInfoFormat {
+ return token.NoPos
+ }
+
+ file := p.prevFile
+ line := p.prevLine
+ delta := p.int()
+ line += delta
+ if p.version >= 5 {
+ if delta == deltaNewFile {
+ if n := p.int(); n >= 0 {
+ // file changed
+ file = p.path()
+ line = n
+ }
+ }
+ } else {
+ if delta == 0 {
+ if n := p.int(); n >= 0 {
+ // file changed
+ file = p.prevFile[:n] + p.string()
+ line = p.int()
+ }
+ }
+ }
+ p.prevFile = file
+ p.prevLine = line
+
+ return p.fake.pos(file, line)
+}
+
+// Synthesize a token.Pos
+type fakeFileSet struct {
+ fset *token.FileSet
+ files map[string]*token.File
+}
+
+func (s *fakeFileSet) pos(file string, line int) token.Pos {
+ // Since we don't know the set of needed file positions, we
+ // reserve maxlines positions per file.
+ const maxlines = 64 * 1024
+ f := s.files[file]
+ if f == nil {
+ f = s.fset.AddFile(file, -1, maxlines)
+ s.files[file] = f
+ // Allocate the fake linebreak indices on first use.
+ // TODO(adonovan): opt: save ~512KB using a more complex scheme?
+ fakeLinesOnce.Do(func() {
+ fakeLines = make([]int, maxlines)
+ for i := range fakeLines {
+ fakeLines[i] = i
+ }
+ })
+ f.SetLines(fakeLines)
+ }
+
+ if line > maxlines {
+ line = 1
+ }
+
+ // Treat the file as if it contained only newlines
+ // and column=1: use the line number as the offset.
+ return f.Pos(line - 1)
+}
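+
+// Illustration (editorial note, not part of the upstream file): each file is
+// given a synthetic 64K-line body in which line i+1 starts at offset i, so
+// ("a.go", 3) always maps to offset 2 of the fake token.File for "a.go".
+// Positions are therefore stable and comparable, but columns are meaningless.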
+
+var (
+ fakeLines []int
+ fakeLinesOnce sync.Once
+)
+
+func (p *importer) qualifiedName() (pkg *types.Package, name string) {
+ name = p.string()
+ pkg = p.pkg()
+ return
+}
+
+func (p *importer) record(t types.Type) {
+ p.typList = append(p.typList, t)
+}
+
+// A dddSlice is a types.Type representing ...T parameters.
+// It only appears for parameter types and does not escape
+// the importer.
+type dddSlice struct {
+ elem types.Type
+}
+
+func (t *dddSlice) Underlying() types.Type { return t }
+func (t *dddSlice) String() string { return "..." + t.elem.String() }
+
+// parent is the package which declared the type; parent == nil means
+// the package currently imported. The parent package is needed for
+// exported struct fields and interface methods which don't contain
+// explicit package information in the export data.
+//
+// A non-nil tname is used as the "owner" of the result type; i.e.,
+// the result type is the underlying type of tname. tname is used
+// to give interface methods a named receiver type where possible.
+func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type {
+ // if the type was seen before, i is its index (>= 0)
+ i := p.tagOrIndex()
+ if i >= 0 {
+ return p.typList[i]
+ }
+
+ // otherwise, i is the type tag (< 0)
+ switch i {
+ case namedTag:
+ // read type object
+ pos := p.pos()
+ parent, name := p.qualifiedName()
+ scope := parent.Scope()
+ obj := scope.Lookup(name)
+
+ // if the object doesn't exist yet, create and insert it
+ if obj == nil {
+ obj = types.NewTypeName(pos, parent, name, nil)
+ scope.Insert(obj)
+ }
+
+ if _, ok := obj.(*types.TypeName); !ok {
+ errorf("pkg = %s, name = %s => %s", parent, name, obj)
+ }
+
+ // associate new named type with obj if it doesn't exist yet
+ t0 := types.NewNamed(obj.(*types.TypeName), nil, nil)
+
+ // but record the existing type, if any
+ tname := obj.Type().(*types.Named) // tname is either t0 or the existing type
+ p.record(tname)
+
+ // read underlying type
+ t0.SetUnderlying(p.typ(parent, t0))
+
+ // interfaces don't have associated methods
+ if types.IsInterface(t0) {
+ return tname
+ }
+
+ // read associated methods
+ for i := p.int(); i > 0; i-- {
+ // TODO(gri) replace this with something closer to fieldName
+ pos := p.pos()
+ name := p.string()
+ if !exported(name) {
+ p.pkg()
+ }
+
+ recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver?
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ p.int() // go:nointerface pragma - discarded
+
+ sig := types.NewSignature(recv.At(0), params, result, isddd)
+ t0.AddMethod(types.NewFunc(pos, parent, name, sig))
+ }
+
+ return tname
+
+ case arrayTag:
+ t := new(types.Array)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ n := p.int64()
+ *t = *types.NewArray(p.typ(parent, nil), n)
+ return t
+
+ case sliceTag:
+ t := new(types.Slice)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewSlice(p.typ(parent, nil))
+ return t
+
+ case dddTag:
+ t := new(dddSlice)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ t.elem = p.typ(parent, nil)
+ return t
+
+ case structTag:
+ t := new(types.Struct)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewStruct(p.fieldList(parent))
+ return t
+
+ case pointerTag:
+ t := new(types.Pointer)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ *t = *types.NewPointer(p.typ(parent, nil))
+ return t
+
+ case signatureTag:
+ t := new(types.Signature)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ *t = *types.NewSignature(nil, params, result, isddd)
+ return t
+
+ case interfaceTag:
+ // Create a dummy entry in the type list. This is safe because we
+ // cannot expect the interface type to appear in a cycle, as any
+ // such cycle must contain a named type which would have been
+ // first defined earlier.
+ // TODO(gri) Is this still true now that we have type aliases?
+ // See issue #23225.
+ n := len(p.typList)
+ if p.trackAllTypes {
+ p.record(nil)
+ }
+
+ var embeddeds []types.Type
+ for n := p.int(); n > 0; n-- {
+ p.pos()
+ embeddeds = append(embeddeds, p.typ(parent, nil))
+ }
+
+ t := newInterface(p.methodList(parent, tname), embeddeds)
+ p.interfaceList = append(p.interfaceList, t)
+ if p.trackAllTypes {
+ p.typList[n] = t
+ }
+ return t
+
+ case mapTag:
+ t := new(types.Map)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ key := p.typ(parent, nil)
+ val := p.typ(parent, nil)
+ *t = *types.NewMap(key, val)
+ return t
+
+ case chanTag:
+ t := new(types.Chan)
+ if p.trackAllTypes {
+ p.record(t)
+ }
+
+ dir := chanDir(p.int())
+ val := p.typ(parent, nil)
+ *t = *types.NewChan(dir, val)
+ return t
+
+ default:
+ errorf("unexpected type tag %d", i) // panics
+ panic("unreachable")
+ }
+}
+
+func chanDir(d int) types.ChanDir {
+ // tag values must match the constants in cmd/compile/internal/gc/go.go
+ switch d {
+ case 1 /* Crecv */ :
+ return types.RecvOnly
+ case 2 /* Csend */ :
+ return types.SendOnly
+ case 3 /* Cboth */ :
+ return types.SendRecv
+ default:
+ errorf("unexpected channel dir %d", d)
+ return 0
+ }
+}
+
+func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) {
+ if n := p.int(); n > 0 {
+ fields = make([]*types.Var, n)
+ tags = make([]string, n)
+ for i := range fields {
+ fields[i], tags[i] = p.field(parent)
+ }
+ }
+ return
+}
+
+func (p *importer) field(parent *types.Package) (*types.Var, string) {
+ pos := p.pos()
+ pkg, name, alias := p.fieldName(parent)
+ typ := p.typ(parent, nil)
+ tag := p.string()
+
+ anonymous := false
+ if name == "" {
+ // anonymous field - typ must be T or *T and T must be a type name
+ switch typ := deref(typ).(type) {
+ case *types.Basic: // basic types are named types
+			pkg = nil // objects defined in Universe scope have no package
+ name = typ.Name()
+ case *types.Named:
+ name = typ.Obj().Name()
+ default:
+ errorf("named base type expected")
+ }
+ anonymous = true
+ } else if alias {
+ // anonymous field: we have an explicit name because it's an alias
+ anonymous = true
+ }
+
+ return types.NewField(pos, pkg, name, typ, anonymous), tag
+}
+
+func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) {
+ if n := p.int(); n > 0 {
+ methods = make([]*types.Func, n)
+ for i := range methods {
+ methods[i] = p.method(parent, baseType)
+ }
+ }
+ return
+}
+
+func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func {
+ pos := p.pos()
+ pkg, name, _ := p.fieldName(parent)
+ // If we don't have a baseType, use a nil receiver.
+ // A receiver using the actual interface type (which
+ // we don't know yet) will be filled in when we call
+ // types.Interface.Complete.
+ var recv *types.Var
+ if baseType != nil {
+ recv = types.NewVar(token.NoPos, parent, "", baseType)
+ }
+ params, isddd := p.paramList()
+ result, _ := p.paramList()
+ sig := types.NewSignature(recv, params, result, isddd)
+ return types.NewFunc(pos, pkg, name, sig)
+}
+
+func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) {
+ name = p.string()
+ pkg = parent
+ if pkg == nil {
+ // use the imported package instead
+ pkg = p.pkgList[0]
+ }
+ if p.version == 0 && name == "_" {
+ // version 0 didn't export a package for _ fields
+ return
+ }
+ switch name {
+ case "":
+ // 1) field name matches base type name and is exported: nothing to do
+ case "?":
+ // 2) field name matches base type name and is not exported: need package
+ name = ""
+ pkg = p.pkg()
+ case "@":
+ // 3) field name doesn't match type name (alias)
+ name = p.string()
+ alias = true
+ fallthrough
+ default:
+ if !exported(name) {
+ pkg = p.pkg()
+ }
+ }
+ return
+}
+
+func (p *importer) paramList() (*types.Tuple, bool) {
+ n := p.int()
+ if n == 0 {
+ return nil, false
+ }
+ // negative length indicates unnamed parameters
+ named := true
+ if n < 0 {
+ n = -n
+ named = false
+ }
+ // n > 0
+ params := make([]*types.Var, n)
+ isddd := false
+ for i := range params {
+ params[i], isddd = p.param(named)
+ }
+ return types.NewTuple(params...), isddd
+}
+
+func (p *importer) param(named bool) (*types.Var, bool) {
+ t := p.typ(nil, nil)
+ td, isddd := t.(*dddSlice)
+ if isddd {
+ t = types.NewSlice(td.elem)
+ }
+
+ var pkg *types.Package
+ var name string
+ if named {
+ name = p.string()
+ if name == "" {
+ errorf("expected named parameter")
+ }
+ if name != "_" {
+ pkg = p.pkg()
+ }
+ if i := strings.Index(name, "·"); i > 0 {
+ name = name[:i] // cut off gc-specific parameter numbering
+ }
+ }
+
+ // read and discard compiler-specific info
+ p.string()
+
+ return types.NewVar(token.NoPos, pkg, name, t), isddd
+}
+
+func exported(name string) bool {
+ ch, _ := utf8.DecodeRuneInString(name)
+ return unicode.IsUpper(ch)
+}
+
+func (p *importer) value() constant.Value {
+ switch tag := p.tagOrIndex(); tag {
+ case falseTag:
+ return constant.MakeBool(false)
+ case trueTag:
+ return constant.MakeBool(true)
+ case int64Tag:
+ return constant.MakeInt64(p.int64())
+ case floatTag:
+ return p.float()
+ case complexTag:
+ re := p.float()
+ im := p.float()
+ return constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+ case stringTag:
+ return constant.MakeString(p.string())
+ case unknownTag:
+ return constant.MakeUnknown()
+ default:
+ errorf("unexpected value tag %d", tag) // panics
+ panic("unreachable")
+ }
+}
+
+func (p *importer) float() constant.Value {
+ sign := p.int()
+ if sign == 0 {
+ return constant.MakeInt64(0)
+ }
+
+ exp := p.int()
+ mant := []byte(p.string()) // big endian
+
+ // remove leading 0's if any
+ for len(mant) > 0 && mant[0] == 0 {
+ mant = mant[1:]
+ }
+
+ // convert to little endian
+ // TODO(gri) go/constant should have a more direct conversion function
+ // (e.g., once it supports a big.Float based implementation)
+ for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 {
+ mant[i], mant[j] = mant[j], mant[i]
+ }
+
+ // adjust exponent (constant.MakeFromBytes creates an integer value,
+ // but mant represents the mantissa bits such that 0.5 <= mant < 1.0)
+ exp -= len(mant) << 3
+ if len(mant) > 0 {
+ for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 {
+ exp++
+ }
+ }
+
+ x := constant.MakeFromBytes(mant)
+ switch {
+ case exp < 0:
+ d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
+ x = constant.BinaryOp(x, token.QUO, d)
+ case exp > 0:
+ x = constant.Shift(x, token.SHL, uint(exp))
+ }
+
+ if sign < 0 {
+ x = constant.UnaryOp(token.SUB, x, 0)
+ }
+ return x
+}
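+
+// Continuing the 6.25 example from bexport.go (editorial note, not part of
+// the upstream file): reading sign = 1, exp = 3 and the single mantissa byte
+// 0x19 gives x = 25, with exp adjusted to 3 - 8 + 3 = -2 by the code above,
+// so the result is 25 / 2**2 = 6.25, exactly the value that was encoded.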
+
+// ----------------------------------------------------------------------------
+// Low-level decoders
+
+func (p *importer) tagOrIndex() int {
+ if p.debugFormat {
+ p.marker('t')
+ }
+
+ return int(p.rawInt64())
+}
+
+func (p *importer) int() int {
+ x := p.int64()
+ if int64(int(x)) != x {
+ errorf("exported integer too large")
+ }
+ return int(x)
+}
+
+func (p *importer) int64() int64 {
+ if p.debugFormat {
+ p.marker('i')
+ }
+
+ return p.rawInt64()
+}
+
+func (p *importer) path() string {
+ if p.debugFormat {
+ p.marker('p')
+ }
+ // if the path was seen before, i is its index (>= 0)
+ // (the empty string is at index 0)
+ i := p.rawInt64()
+ if i >= 0 {
+ return p.pathList[i]
+ }
+ // otherwise, i is the negative path length (< 0)
+ a := make([]string, -i)
+ for n := range a {
+ a[n] = p.string()
+ }
+ s := strings.Join(a, "/")
+ p.pathList = append(p.pathList, s)
+ return s
+}
+
+func (p *importer) string() string {
+ if p.debugFormat {
+ p.marker('s')
+ }
+ // if the string was seen before, i is its index (>= 0)
+ // (the empty string is at index 0)
+ i := p.rawInt64()
+ if i >= 0 {
+ return p.strList[i]
+ }
+ // otherwise, i is the negative string length (< 0)
+ if n := int(-i); n <= cap(p.buf) {
+ p.buf = p.buf[:n]
+ } else {
+ p.buf = make([]byte, n)
+ }
+ for i := range p.buf {
+ p.buf[i] = p.rawByte()
+ }
+ s := string(p.buf)
+ p.strList = append(p.strList, s)
+ return s
+}
+
+func (p *importer) marker(want byte) {
+ if got := p.rawByte(); got != want {
+ errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read)
+ }
+
+ pos := p.read
+ if n := int(p.rawInt64()); n != pos {
+ errorf("incorrect position: got %d; want %d", n, pos)
+ }
+}
+
+// rawInt64 should only be used by low-level decoders.
+func (p *importer) rawInt64() int64 {
+ i, err := binary.ReadVarint(p)
+ if err != nil {
+ errorf("read error: %v", err)
+ }
+ return i
+}
+
+// rawStringln should only be used to read the initial version string.
+func (p *importer) rawStringln(b byte) string {
+ p.buf = p.buf[:0]
+ for b != '\n' {
+ p.buf = append(p.buf, b)
+ b = p.rawByte()
+ }
+ return string(p.buf)
+}
+
+// needed for binary.ReadVarint in rawInt64
+func (p *importer) ReadByte() (byte, error) {
+ return p.rawByte(), nil
+}
+
+// rawByte is the bottleneck interface for reading p.data.
+// It unescapes '|' 'S' to '$' and '|' '|' to '|'.
+// rawByte should only be used by low-level decoders.
+func (p *importer) rawByte() byte {
+ b := p.data[0]
+ r := 1
+ if b == '|' {
+ b = p.data[1]
+ r = 2
+ switch b {
+ case 'S':
+ b = '$'
+ case '|':
+ // nothing to do
+ default:
+ errorf("unexpected escape sequence in export data")
+ }
+ }
+ p.data = p.data[r:]
+ p.read += r
+ return b
+}
+
+// ----------------------------------------------------------------------------
+// Export format
+
+// Tags. Must be < 0.
+const (
+ // Objects
+ packageTag = -(iota + 1)
+ constTag
+ typeTag
+ varTag
+ funcTag
+ endTag
+
+ // Types
+ namedTag
+ arrayTag
+ sliceTag
+ dddTag
+ structTag
+ pointerTag
+ signatureTag
+ interfaceTag
+ mapTag
+ chanTag
+
+ // Values
+ falseTag
+ trueTag
+ int64Tag
+ floatTag
+ fractionTag // not used by gc
+ complexTag
+ stringTag
+ nilTag // only used by gc (appears in exported inlined function bodies)
+ unknownTag // not used by gc (only appears in packages with errors)
+
+ // Type aliases
+ aliasTag
+)
+
+var predeclared = []types.Type{
+ // basic types
+ types.Typ[types.Bool],
+ types.Typ[types.Int],
+ types.Typ[types.Int8],
+ types.Typ[types.Int16],
+ types.Typ[types.Int32],
+ types.Typ[types.Int64],
+ types.Typ[types.Uint],
+ types.Typ[types.Uint8],
+ types.Typ[types.Uint16],
+ types.Typ[types.Uint32],
+ types.Typ[types.Uint64],
+ types.Typ[types.Uintptr],
+ types.Typ[types.Float32],
+ types.Typ[types.Float64],
+ types.Typ[types.Complex64],
+ types.Typ[types.Complex128],
+ types.Typ[types.String],
+
+ // basic type aliases
+ types.Universe.Lookup("byte").Type(),
+ types.Universe.Lookup("rune").Type(),
+
+ // error
+ types.Universe.Lookup("error").Type(),
+
+ // untyped types
+ types.Typ[types.UntypedBool],
+ types.Typ[types.UntypedInt],
+ types.Typ[types.UntypedRune],
+ types.Typ[types.UntypedFloat],
+ types.Typ[types.UntypedComplex],
+ types.Typ[types.UntypedString],
+ types.Typ[types.UntypedNil],
+
+ // package unsafe
+ types.Typ[types.UnsafePointer],
+
+ // invalid type
+ types.Typ[types.Invalid], // only appears in packages with errors
+
+ // used internally by gc; never used by this package or in .a files
+ anyType{},
+}
+
+type anyType struct{}
+
+func (t anyType) Underlying() types.Type { return t }
+func (t anyType) String() string { return "any" }
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
new file mode 100644
index 00000000..f33dc561
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
@@ -0,0 +1,93 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
+
+// This file implements FindExportData.
+
+package gcimporter
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+)
+
+func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
+ // See $GOROOT/include/ar.h.
+ hdr := make([]byte, 16+12+6+6+8+10+2)
+ _, err = io.ReadFull(r, hdr)
+ if err != nil {
+ return
+ }
+ // leave for debugging
+ if false {
+ fmt.Printf("header: %s", hdr)
+ }
+ s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
+ size, err = strconv.Atoi(s)
+ if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
+ err = fmt.Errorf("invalid archive header")
+ return
+ }
+ name = strings.TrimSpace(string(hdr[:16]))
+ return
+}
+
+// FindExportData positions the reader r at the beginning of the
+// export data section of an underlying GC-created object/archive
+// file by reading from it. The reader must be positioned at the
+// start of the file before calling this function. The hdr result
+// is the string before the export data, either "$$" or "$$B".
+//
+func FindExportData(r *bufio.Reader) (hdr string, err error) {
+ // Read first line to make sure this is an object file.
+ line, err := r.ReadSlice('\n')
+ if err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+
+ if string(line) == "!<arch>\n" {
+ // Archive file. Scan to __.PKGDEF.
+ var name string
+ if name, _, err = readGopackHeader(r); err != nil {
+ return
+ }
+
+ // First entry should be __.PKGDEF.
+ if name != "__.PKGDEF" {
+ err = fmt.Errorf("go archive is missing __.PKGDEF")
+ return
+ }
+
+ // Read first line of __.PKGDEF data, so that line
+ // is once again the first line of the input.
+ if line, err = r.ReadSlice('\n'); err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+ }
+
+ // Now at __.PKGDEF in archive or still at beginning of file.
+ // Either way, line should begin with "go object ".
+ if !strings.HasPrefix(string(line), "go object ") {
+ err = fmt.Errorf("not a Go object file")
+ return
+ }
+
+ // Skip over object header to export data.
+ // Begins after first line starting with $$.
+ for line[0] != '$' {
+ if line, err = r.ReadSlice('\n'); err != nil {
+ err = fmt.Errorf("can't find export data (%v)", err)
+ return
+ }
+ }
+ hdr = string(line)
+
+ return
+}
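+
+// A minimal usage sketch (editorial note, not part of the upstream file; the
+// archive path is hypothetical):
+//
+//	f, err := os.Open("/path/to/pkg.a")
+//	if err != nil {
+//		// handle error
+//	}
+//	defer f.Close()
+//	hdr, err := FindExportData(bufio.NewReader(f))
+//	// hdr is "$$\n" for the textual format or "$$B\n" for the binary format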
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
new file mode 100644
index 00000000..47dd4613
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
@@ -0,0 +1,1051 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go,
+// but it also contains the original source-based importer code for Go1.6.
+// Once we stop supporting 1.6, we can remove that code.
+
+// Package gcimporter provides various functions for reading
+// gc-generated object files that can be used to implement the
+// Importer interface defined by the Go 1.5 standard library package.
+package gcimporter // import "golang.org/x/tools/go/internal/gcimporter"
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "go/build"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+ "text/scanner"
+)
+
+// debugging/development support
+const debug = false
+
+var pkgExts = [...]string{".a", ".o"}
+
+// FindPkg returns the filename and unique package id for an import
+// path based on package information provided by build.Import (using
+// the build.Default build.Context). A relative srcDir is interpreted
+// relative to the current working directory.
+// If no file was found, an empty filename is returned.
+//
+func FindPkg(path, srcDir string) (filename, id string) {
+ if path == "" {
+ return
+ }
+
+ var noext string
+ switch {
+ default:
+ // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
+ // Don't require the source files to be present.
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
+ srcDir = abs
+ }
+ bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
+ if bp.PkgObj == "" {
+ id = path // make sure we have an id to print in error message
+ return
+ }
+ noext = strings.TrimSuffix(bp.PkgObj, ".a")
+ id = bp.ImportPath
+
+ case build.IsLocalImport(path):
+ // "./x" -> "/this/directory/x.ext", "/this/directory/x"
+ noext = filepath.Join(srcDir, path)
+ id = noext
+
+ case filepath.IsAbs(path):
+ // for completeness only - go/build.Import
+ // does not support absolute imports
+ // "/x" -> "/x.ext", "/x"
+ noext = path
+ id = path
+ }
+
+ if false { // for debugging
+ if path != id {
+ fmt.Printf("%s -> %s\n", path, id)
+ }
+ }
+
+ // try extensions
+ for _, ext := range pkgExts {
+ filename = noext + ext
+ if f, err := os.Stat(filename); err == nil && !f.IsDir() {
+ return
+ }
+ }
+
+ filename = "" // not found
+ return
+}
+
+// ImportData imports a package by reading the gc-generated export data,
+// adds the corresponding package object to the packages map indexed by id,
+// and returns the object.
+//
+// The packages map must contain all packages already imported. The data
+// reader position must be the beginning of the export data section. The
+// filename is only used in error messages.
+//
+// If packages[id] contains the completely imported package, that package
+// can be used directly, and there is no need to call this function (though
+// doing so is harmless apart from the extra time used).
+//
+func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
+ // support for parser error handling
+ defer func() {
+ switch r := recover().(type) {
+ case nil:
+ // nothing to do
+ case importError:
+ err = r
+ default:
+ panic(r) // internal error
+ }
+ }()
+
+ var p parser
+ p.init(filename, id, data, packages)
+ pkg = p.parseExport()
+
+ return
+}
+
+// Import imports a gc-generated package given its import path and srcDir, adds
+// the corresponding package object to the packages map, and returns the object.
+// The packages map must contain all packages already imported.
+//
+func Import(packages map[string]*types.Package, path, srcDir string) (pkg *types.Package, err error) {
+ filename, id := FindPkg(path, srcDir)
+ if filename == "" {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ err = fmt.Errorf("can't find import: %q", id)
+ return
+ }
+
+ // no need to re-import if the package was imported completely before
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return
+ }
+ defer func() {
+ f.Close()
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("reading export data: %s: %v", filename, err)
+ }
+ }()
+
+ var hdr string
+ buf := bufio.NewReader(f)
+ if hdr, err = FindExportData(buf); err != nil {
+ return
+ }
+
+ switch hdr {
+ case "$$\n":
+ return ImportData(packages, filename, id, buf)
+
+ case "$$B\n":
+ var data []byte
+ data, err = ioutil.ReadAll(buf)
+ if err != nil {
+ break
+ }
+
+ // TODO(gri): allow clients of go/importer to provide a FileSet.
+ // Or, define a new standard go/types/gcexportdata package.
+ fset := token.NewFileSet()
+
+ // The indexed export format starts with an 'i'; the older
+ // binary export format starts with a 'c', 'd', or 'v'
+ // (from "version"). Select appropriate importer.
+ if len(data) > 0 && data[0] == 'i' {
+ _, pkg, err = IImportData(fset, packages, data[1:], id)
+ } else {
+ _, pkg, err = BImportData(fset, packages, data, id)
+ }
+
+ default:
+ err = fmt.Errorf("unknown export data header: %q", hdr)
+ }
+
+ return
+}
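+
+// Illustrative usage (editorial note, not part of the upstream file; the
+// import path and source directory are hypothetical):
+//
+//	packages := make(map[string]*types.Package)
+//	pkg, err := Import(packages, "fmt", "/path/to/srcdir")
+//	if err != nil {
+//		// no compiled package file found, or export data unreadable
+//	}
+//	_ = pkg // the package is also recorded in packages under its id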
+
+// ----------------------------------------------------------------------------
+// Parser
+
+// TODO(gri) Imported objects don't have position information.
+// Ideally use the debug table line info; alternatively
+// create some fake position (or the position of the
+// import). That way error messages referring to imported
+// objects can print meaningful information.
+
+// parser parses the exports inside a gc compiler-produced
+// object/archive file and populates its scope with the results.
+type parser struct {
+ scanner scanner.Scanner
+ tok rune // current token
+ lit string // literal string; only valid for Ident, Int, String tokens
+ id string // package id of imported package
+ sharedPkgs map[string]*types.Package // package id -> package object (across importer)
+ localPkgs map[string]*types.Package // package id -> package object (just this package)
+}
+
+func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) {
+ p.scanner.Init(src)
+ p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
+ p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
+ p.scanner.Whitespace = 1<<'\t' | 1<<' '
+ p.scanner.Filename = filename // for good error messages
+ p.next()
+ p.id = id
+ p.sharedPkgs = packages
+ if debug {
+ // check consistency of packages map
+ for _, pkg := range packages {
+ if pkg.Name() == "" {
+ fmt.Printf("no package name for %s\n", pkg.Path())
+ }
+ }
+ }
+}
+
+func (p *parser) next() {
+ p.tok = p.scanner.Scan()
+ switch p.tok {
+ case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·':
+ p.lit = p.scanner.TokenText()
+ default:
+ p.lit = ""
+ }
+ if debug {
+ fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
+ }
+}
+
+func declTypeName(pkg *types.Package, name string) *types.TypeName {
+ scope := pkg.Scope()
+ if obj := scope.Lookup(name); obj != nil {
+ return obj.(*types.TypeName)
+ }
+ obj := types.NewTypeName(token.NoPos, pkg, name, nil)
+ // a named type may be referred to before the underlying type
+ // is known - set it up
+ types.NewNamed(obj, nil, nil)
+ scope.Insert(obj)
+ return obj
+}
+
+// ----------------------------------------------------------------------------
+// Error handling
+
+// Internal errors are boxed as importErrors.
+type importError struct {
+ pos scanner.Position
+ err error
+}
+
+func (e importError) Error() string {
+ return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
+}
+
+func (p *parser) error(err interface{}) {
+ if s, ok := err.(string); ok {
+ err = errors.New(s)
+ }
+ // panic with a runtime.Error if err is not an error
+ panic(importError{p.scanner.Pos(), err.(error)})
+}
+
+func (p *parser) errorf(format string, args ...interface{}) {
+ p.error(fmt.Sprintf(format, args...))
+}
+
+func (p *parser) expect(tok rune) string {
+ lit := p.lit
+ if p.tok != tok {
+ p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
+ }
+ p.next()
+ return lit
+}
+
+func (p *parser) expectSpecial(tok string) {
+ sep := 'x' // not white space
+ i := 0
+ for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
+ sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+ p.next()
+ i++
+ }
+ if i < len(tok) {
+ p.errorf("expected %q, got %q", tok, tok[0:i])
+ }
+}
+
+func (p *parser) expectKeyword(keyword string) {
+ lit := p.expect(scanner.Ident)
+ if lit != keyword {
+ p.errorf("expected keyword %s, got %q", keyword, lit)
+ }
+}
+
+// ----------------------------------------------------------------------------
+// Qualified and unqualified names
+
+// PackageId = string_lit .
+//
+func (p *parser) parsePackageId() string {
+ id, err := strconv.Unquote(p.expect(scanner.String))
+ if err != nil {
+ p.error(err)
+ }
+ // id == "" stands for the imported package id
+ // (only known at time of package installation)
+ if id == "" {
+ id = p.id
+ }
+ return id
+}
+
+// PackageName = ident .
+//
+func (p *parser) parsePackageName() string {
+ return p.expect(scanner.Ident)
+}
+
+// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
+func (p *parser) parseDotIdent() string {
+ ident := ""
+ if p.tok != scanner.Int {
+ sep := 'x' // not white space
+ for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
+ ident += p.lit
+ sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
+ p.next()
+ }
+ }
+ if ident == "" {
+ p.expect(scanner.Ident) // use expect() for error handling
+ }
+ return ident
+}
+
+// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
+//
+func (p *parser) parseQualifiedName() (id, name string) {
+ p.expect('@')
+ id = p.parsePackageId()
+ p.expect('.')
+ // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
+ if p.tok == '?' {
+ p.next()
+ } else {
+ name = p.parseDotIdent()
+ }
+ return
+}
+
+// getPkg returns the package for a given id. If the package is
+// not found, create the package and add it to the p.localPkgs
+// and p.sharedPkgs maps. name is the (expected) name of the
+// package. If name == "", the package name is expected to be
+// set later via an import clause in the export data.
+//
+// id identifies a package, usually by a canonical package path like
+// "encoding/json" but possibly by a non-canonical import path like
+// "./json".
+//
+func (p *parser) getPkg(id, name string) *types.Package {
+ // package unsafe is not in the packages maps - handle explicitly
+ if id == "unsafe" {
+ return types.Unsafe
+ }
+
+ pkg := p.localPkgs[id]
+ if pkg == nil {
+ // first import of id from this package
+ pkg = p.sharedPkgs[id]
+ if pkg == nil {
+ // first import of id by this importer;
+ // add (possibly unnamed) pkg to shared packages
+ pkg = types.NewPackage(id, name)
+ p.sharedPkgs[id] = pkg
+ }
+ // add (possibly unnamed) pkg to local packages
+ if p.localPkgs == nil {
+ p.localPkgs = make(map[string]*types.Package)
+ }
+ p.localPkgs[id] = pkg
+ } else if name != "" {
+ // package exists already and we have an expected package name;
+ // make sure names match or set package name if necessary
+ if pname := pkg.Name(); pname == "" {
+ pkg.SetName(name)
+ } else if pname != name {
+ p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name)
+ }
+ }
+ return pkg
+}
+
+// parseExportedName is like parseQualifiedName, but
+// the package id is resolved to an imported *types.Package.
+//
+func (p *parser) parseExportedName() (pkg *types.Package, name string) {
+ id, name := p.parseQualifiedName()
+ pkg = p.getPkg(id, "")
+ return
+}
+
+// ----------------------------------------------------------------------------
+// Types
+
+// BasicType = identifier .
+//
+func (p *parser) parseBasicType() types.Type {
+ id := p.expect(scanner.Ident)
+ obj := types.Universe.Lookup(id)
+ if obj, ok := obj.(*types.TypeName); ok {
+ return obj.Type()
+ }
+ p.errorf("not a basic type: %s", id)
+ return nil
+}
+
+// ArrayType = "[" int_lit "]" Type .
+//
+func (p *parser) parseArrayType(parent *types.Package) types.Type {
+ // "[" already consumed and lookahead known not to be "]"
+ lit := p.expect(scanner.Int)
+ p.expect(']')
+ elem := p.parseType(parent)
+ n, err := strconv.ParseInt(lit, 10, 64)
+ if err != nil {
+ p.error(err)
+ }
+ return types.NewArray(elem, n)
+}
+
+// MapType = "map" "[" Type "]" Type .
+//
+func (p *parser) parseMapType(parent *types.Package) types.Type {
+ p.expectKeyword("map")
+ p.expect('[')
+ key := p.parseType(parent)
+ p.expect(']')
+ elem := p.parseType(parent)
+ return types.NewMap(key, elem)
+}
+
+// Name = identifier | "?" | QualifiedName .
+//
+// For unqualified and anonymous names, the returned package is the parent
+// package unless parent == nil, in which case the returned package is the
+// package being imported. (The parent package is not nil if the name
+// is an unqualified struct field or interface method name belonging to a
+// type declared in another package.)
+//
+// For qualified names, the returned package is nil (and not created if
+// it doesn't exist yet) unless materializePkg is set (which creates an
+// unnamed package with valid package path). In the latter case, a
+// subsequent import clause is expected to provide a name for the package.
+//
+func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
+ pkg = parent
+ if pkg == nil {
+ pkg = p.sharedPkgs[p.id]
+ }
+ switch p.tok {
+ case scanner.Ident:
+ name = p.lit
+ p.next()
+ case '?':
+ // anonymous
+ p.next()
+ case '@':
+ // exported name prefixed with package path
+ pkg = nil
+ var id string
+ id, name = p.parseQualifiedName()
+ if materializePkg {
+ pkg = p.getPkg(id, "")
+ }
+ default:
+ p.error("name expected")
+ }
+ return
+}
+
+func deref(typ types.Type) types.Type {
+ if p, _ := typ.(*types.Pointer); p != nil {
+ return p.Elem()
+ }
+ return typ
+}
+
+// Field = Name Type [ string_lit ] .
+//
+func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
+ pkg, name := p.parseName(parent, true)
+
+ if name == "_" {
+ // Blank fields should be package-qualified because they
+ // are unexported identifiers, but gc does not qualify them.
+ // Assuming that the ident belongs to the current package
+ // causes types to change during re-exporting, leading
+ // to spurious "can't assign A to B" errors from go/types.
+ // As a workaround, pretend all blank fields belong
+ // to the same unique dummy package.
+ const blankpkg = "<_>"
+ pkg = p.getPkg(blankpkg, blankpkg)
+ }
+
+ typ := p.parseType(parent)
+ anonymous := false
+ if name == "" {
+ // anonymous field - typ must be T or *T and T must be a type name
+ switch typ := deref(typ).(type) {
+ case *types.Basic: // basic types are named types
+ pkg = nil // objects defined in Universe scope have no package
+ name = typ.Name()
+ case *types.Named:
+ name = typ.Obj().Name()
+ default:
+ p.errorf("anonymous field expected")
+ }
+ anonymous = true
+ }
+ tag := ""
+ if p.tok == scanner.String {
+ s := p.expect(scanner.String)
+ var err error
+ tag, err = strconv.Unquote(s)
+ if err != nil {
+ p.errorf("invalid struct tag %s: %s", s, err)
+ }
+ }
+ return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
+}
+
+// StructType = "struct" "{" [ FieldList ] "}" .
+// FieldList = Field { ";" Field } .
+//
+func (p *parser) parseStructType(parent *types.Package) types.Type {
+ var fields []*types.Var
+ var tags []string
+
+ p.expectKeyword("struct")
+ p.expect('{')
+ for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
+ if i > 0 {
+ p.expect(';')
+ }
+ fld, tag := p.parseField(parent)
+ if tag != "" && tags == nil {
+ tags = make([]string, i)
+ }
+ if tags != nil {
+ tags = append(tags, tag)
+ }
+ fields = append(fields, fld)
+ }
+ p.expect('}')
+
+ return types.NewStruct(fields, tags)
+}
+
+// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
+//
+func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
+ _, name := p.parseName(nil, false)
+ // remove gc-specific parameter numbering
+ if i := strings.Index(name, "·"); i >= 0 {
+ name = name[:i]
+ }
+ if p.tok == '.' {
+ p.expectSpecial("...")
+ isVariadic = true
+ }
+ typ := p.parseType(nil)
+ if isVariadic {
+ typ = types.NewSlice(typ)
+ }
+ // ignore argument tag (e.g. "noescape")
+ if p.tok == scanner.String {
+ p.next()
+ }
+ // TODO(gri) should we provide a package?
+ par = types.NewVar(token.NoPos, nil, name, typ)
+ return
+}
+
+// Parameters = "(" [ ParameterList ] ")" .
+// ParameterList = { Parameter "," } Parameter .
+//
+func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
+ p.expect('(')
+ for p.tok != ')' && p.tok != scanner.EOF {
+ if len(list) > 0 {
+ p.expect(',')
+ }
+ par, variadic := p.parseParameter()
+ list = append(list, par)
+ if variadic {
+ if isVariadic {
+ p.error("... not on final argument")
+ }
+ isVariadic = true
+ }
+ }
+ p.expect(')')
+
+ return
+}
+
+// Signature = Parameters [ Result ] .
+// Result = Type | Parameters .
+//
+func (p *parser) parseSignature(recv *types.Var) *types.Signature {
+ params, isVariadic := p.parseParameters()
+
+ // optional result type
+ var results []*types.Var
+ if p.tok == '(' {
+ var variadic bool
+ results, variadic = p.parseParameters()
+ if variadic {
+ p.error("... not permitted on result type")
+ }
+ }
+
+ return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
+}
+
+// InterfaceType = "interface" "{" [ MethodList ] "}" .
+// MethodList = Method { ";" Method } .
+// Method = Name Signature .
+//
+// The methods of embedded interfaces are always "inlined"
+// by the compiler and thus embedded interfaces are never
+// visible in the export data.
+//
+func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
+ var methods []*types.Func
+
+ p.expectKeyword("interface")
+ p.expect('{')
+ for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
+ if i > 0 {
+ p.expect(';')
+ }
+ pkg, name := p.parseName(parent, true)
+ sig := p.parseSignature(nil)
+ methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig))
+ }
+ p.expect('}')
+
+ // Complete requires the type's embedded interfaces to be fully defined,
+ // but we do not define any
+ return types.NewInterface(methods, nil).Complete()
+}
+
+// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
+//
+func (p *parser) parseChanType(parent *types.Package) types.Type {
+ dir := types.SendRecv
+ if p.tok == scanner.Ident {
+ p.expectKeyword("chan")
+ if p.tok == '<' {
+ p.expectSpecial("<-")
+ dir = types.SendOnly
+ }
+ } else {
+ p.expectSpecial("<-")
+ p.expectKeyword("chan")
+ dir = types.RecvOnly
+ }
+ elem := p.parseType(parent)
+ return types.NewChan(dir, elem)
+}
+
+// Type =
+// BasicType | TypeName | ArrayType | SliceType | StructType |
+// PointerType | FuncType | InterfaceType | MapType | ChanType |
+// "(" Type ")" .
+//
+// BasicType = ident .
+// TypeName = ExportedName .
+// SliceType = "[" "]" Type .
+// PointerType = "*" Type .
+// FuncType = "func" Signature .
+//
+func (p *parser) parseType(parent *types.Package) types.Type {
+ switch p.tok {
+ case scanner.Ident:
+ switch p.lit {
+ default:
+ return p.parseBasicType()
+ case "struct":
+ return p.parseStructType(parent)
+ case "func":
+ // FuncType
+ p.next()
+ return p.parseSignature(nil)
+ case "interface":
+ return p.parseInterfaceType(parent)
+ case "map":
+ return p.parseMapType(parent)
+ case "chan":
+ return p.parseChanType(parent)
+ }
+ case '@':
+ // TypeName
+ pkg, name := p.parseExportedName()
+ return declTypeName(pkg, name).Type()
+ case '[':
+ p.next() // look ahead
+ if p.tok == ']' {
+ // SliceType
+ p.next()
+ return types.NewSlice(p.parseType(parent))
+ }
+ return p.parseArrayType(parent)
+ case '*':
+ // PointerType
+ p.next()
+ return types.NewPointer(p.parseType(parent))
+ case '<':
+ return p.parseChanType(parent)
+ case '(':
+ // "(" Type ")"
+ p.next()
+ typ := p.parseType(parent)
+ p.expect(')')
+ return typ
+ }
+ p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
+ return nil
+}
+
+// ----------------------------------------------------------------------------
+// Declarations
+
+// ImportDecl = "import" PackageName PackageId .
+//
+func (p *parser) parseImportDecl() {
+ p.expectKeyword("import")
+ name := p.parsePackageName()
+ p.getPkg(p.parsePackageId(), name)
+}
+
+// int_lit = [ "+" | "-" ] { "0" ... "9" } .
+//
+func (p *parser) parseInt() string {
+ s := ""
+ switch p.tok {
+ case '-':
+ s = "-"
+ p.next()
+ case '+':
+ p.next()
+ }
+ return s + p.expect(scanner.Int)
+}
+
+// number = int_lit [ "p" int_lit ] .
+//
+func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
+ // mantissa
+ mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
+ if mant == nil {
+ panic("invalid mantissa")
+ }
+
+ if p.lit == "p" {
+ // exponent (base 2)
+ p.next()
+ exp, err := strconv.ParseInt(p.parseInt(), 10, 0)
+ if err != nil {
+ p.error(err)
+ }
+ if exp < 0 {
+ denom := constant.MakeInt64(1)
+ denom = constant.Shift(denom, token.SHL, uint(-exp))
+ typ = types.Typ[types.UntypedFloat]
+ val = constant.BinaryOp(mant, token.QUO, denom)
+ return
+ }
+ if exp > 0 {
+ mant = constant.Shift(mant, token.SHL, uint(exp))
+ }
+ typ = types.Typ[types.UntypedFloat]
+ val = mant
+ return
+ }
+
+ typ = types.Typ[types.UntypedInt]
+ val = mant
+ return
+}
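+
+// Examples (editorial note, not part of the upstream file): the literal "20"
+// parses as the untyped integer 20, while "5p+2" parses as the untyped float
+// 5 * 2**2 = 20 and "1p-2" as 1 / 2**2 = 0.25.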
+
+// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
+// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
+// bool_lit = "true" | "false" .
+// complex_lit = "(" float_lit "+" float_lit "i" ")" .
+// rune_lit = "(" int_lit "+" int_lit ")" .
+// string_lit = `"` { unicode_char } `"` .
+//
+func (p *parser) parseConstDecl() {
+ p.expectKeyword("const")
+ pkg, name := p.parseExportedName()
+
+ var typ0 types.Type
+ if p.tok != '=' {
+ // constant types are never structured - no need for parent type
+ typ0 = p.parseType(nil)
+ }
+
+ p.expect('=')
+ var typ types.Type
+ var val constant.Value
+ switch p.tok {
+ case scanner.Ident:
+ // bool_lit
+ if p.lit != "true" && p.lit != "false" {
+ p.error("expected true or false")
+ }
+ typ = types.Typ[types.UntypedBool]
+ val = constant.MakeBool(p.lit == "true")
+ p.next()
+
+ case '-', scanner.Int:
+ // int_lit
+ typ, val = p.parseNumber()
+
+ case '(':
+ // complex_lit or rune_lit
+ p.next()
+ if p.tok == scanner.Char {
+ p.next()
+ p.expect('+')
+ typ = types.Typ[types.UntypedRune]
+ _, val = p.parseNumber()
+ p.expect(')')
+ break
+ }
+ _, re := p.parseNumber()
+ p.expect('+')
+ _, im := p.parseNumber()
+ p.expectKeyword("i")
+ p.expect(')')
+ typ = types.Typ[types.UntypedComplex]
+ val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+
+ case scanner.Char:
+ // rune_lit
+ typ = types.Typ[types.UntypedRune]
+ val = constant.MakeFromLiteral(p.lit, token.CHAR, 0)
+ p.next()
+
+ case scanner.String:
+ // string_lit
+ typ = types.Typ[types.UntypedString]
+ val = constant.MakeFromLiteral(p.lit, token.STRING, 0)
+ p.next()
+
+ default:
+		p.errorf("expected literal, got %s", scanner.TokenString(p.tok))
+ }
+
+ if typ0 == nil {
+ typ0 = typ
+ }
+
+ pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
+}
+
+// TypeDecl = "type" ExportedName Type .
+//
+func (p *parser) parseTypeDecl() {
+ p.expectKeyword("type")
+ pkg, name := p.parseExportedName()
+ obj := declTypeName(pkg, name)
+
+ // The type object may have been imported before and thus already
+ // have a type associated with it. We still need to parse the type
+ // structure, but throw it away if the object already has a type.
+ // This ensures that all imports refer to the same type object for
+ // a given type declaration.
+ typ := p.parseType(pkg)
+
+ if name := obj.Type().(*types.Named); name.Underlying() == nil {
+ name.SetUnderlying(typ)
+ }
+}
+
+// VarDecl = "var" ExportedName Type .
+//
+func (p *parser) parseVarDecl() {
+ p.expectKeyword("var")
+ pkg, name := p.parseExportedName()
+ typ := p.parseType(pkg)
+ pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
+}
+
+// Func = Signature [ Body ] .
+// Body = "{" ... "}" .
+//
+func (p *parser) parseFunc(recv *types.Var) *types.Signature {
+ sig := p.parseSignature(recv)
+ if p.tok == '{' {
+ p.next()
+ for i := 1; i > 0; p.next() {
+ switch p.tok {
+ case '{':
+ i++
+ case '}':
+ i--
+ }
+ }
+ }
+ return sig
+}
+
+// MethodDecl = "func" Receiver Name Func .
+// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
+//
+func (p *parser) parseMethodDecl() {
+ // "func" already consumed
+ p.expect('(')
+ recv, _ := p.parseParameter() // receiver
+ p.expect(')')
+
+ // determine receiver base type object
+ base := deref(recv.Type()).(*types.Named)
+
+ // parse method name, signature, and possibly inlined body
+ _, name := p.parseName(nil, false)
+ sig := p.parseFunc(recv)
+
+ // methods always belong to the same package as the base type object
+ pkg := base.Obj().Pkg()
+
+ // add method to type unless type was imported before
+ // and method exists already
+ // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small.
+ base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
+}
+
+// FuncDecl = "func" ExportedName Func .
+//
+func (p *parser) parseFuncDecl() {
+ // "func" already consumed
+ pkg, name := p.parseExportedName()
+ typ := p.parseFunc(nil)
+ pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
+}
+
+// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
+//
+func (p *parser) parseDecl() {
+ if p.tok == scanner.Ident {
+ switch p.lit {
+ case "import":
+ p.parseImportDecl()
+ case "const":
+ p.parseConstDecl()
+ case "type":
+ p.parseTypeDecl()
+ case "var":
+ p.parseVarDecl()
+ case "func":
+ p.next() // look ahead
+ if p.tok == '(' {
+ p.parseMethodDecl()
+ } else {
+ p.parseFuncDecl()
+ }
+ }
+ }
+ p.expect('\n')
+}
+
+// ----------------------------------------------------------------------------
+// Export
+
+// Export = "PackageClause { Decl } "$$" .
+// PackageClause = "package" PackageName [ "safe" ] "\n" .
+//
+func (p *parser) parseExport() *types.Package {
+ p.expectKeyword("package")
+ name := p.parsePackageName()
+ if p.tok == scanner.Ident && p.lit == "safe" {
+ // package was compiled with -u option - ignore
+ p.next()
+ }
+ p.expect('\n')
+
+ pkg := p.getPkg(p.id, name)
+
+ for p.tok != '$' && p.tok != scanner.EOF {
+ p.parseDecl()
+ }
+
+ if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
+ // don't call next()/expect() since reading past the
+ // export data may cause scanner errors (e.g. NUL chars)
+ p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
+ }
+
+ if n := p.scanner.ErrorCount; n != 0 {
+ p.errorf("expected no scanner errors, got %d", n)
+ }
+
+ // Record all locally referenced packages as imports.
+ var imports []*types.Package
+ for id, pkg2 := range p.localPkgs {
+ if pkg2.Name() == "" {
+ p.errorf("%s package has no name", id)
+ }
+ if id == p.id {
+ continue // avoid self-edge
+ }
+ imports = append(imports, pkg2)
+ }
+ sort.Sort(byPath(imports))
+ pkg.SetImports(imports)
+
+ // package was imported completely and without errors
+ pkg.MarkComplete()
+
+ return pkg
+}
+
+type byPath []*types.Package
+
+func (a byPath) Len() int { return len(a) }
+func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
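
For comparison, outside vendored tooling the same kind of *types.Package is normally obtained through the standard library's go/importer facade; a minimal sketch (whether export data is found depends on the local toolchain and Go version):

package main

import (
	"fmt"
	"go/importer"
)

func main() {
	// importer.Default() reads export data for the compiler this binary
	// was built with, much like the textual parser above.
	pkg, err := importer.Default().Import("strings")
	if err != nil {
		panic(err)
	}
	fmt.Println(pkg.Path(), pkg.Name(), pkg.Complete())
}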
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
new file mode 100644
index 00000000..0fd22bb0
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
@@ -0,0 +1,598 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed package import.
+// See cmd/compile/internal/gc/iexport.go for the export data format.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
+
+package gcimporter
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "io"
+ "sort"
+)
+
+type intReader struct {
+ *bytes.Reader
+ path string
+}
+
+func (r *intReader) int64() int64 {
+ i, err := binary.ReadVarint(r.Reader)
+ if err != nil {
+ errorf("import %q: read varint error: %v", r.path, err)
+ }
+ return i
+}
+
+func (r *intReader) uint64() uint64 {
+ i, err := binary.ReadUvarint(r.Reader)
+ if err != nil {
+ errorf("import %q: read varint error: %v", r.path, err)
+ }
+ return i
+}
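
The intReader above is a thin wrapper around the standard varint wire encoding; a self-contained round trip of those primitives looks like this:

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

func main() {
	buf := make([]byte, binary.MaxVarintLen64)

	// Unsigned varint (used for lengths, counts, and offsets).
	n := binary.PutUvarint(buf, 300)
	u, _ := binary.ReadUvarint(bytes.NewReader(buf[:n]))

	// Signed (zig-zag) varint (used for deltas such as line offsets).
	n = binary.PutVarint(buf, -7)
	s, _ := binary.ReadVarint(bytes.NewReader(buf[:n]))

	fmt.Println(u, s) // 300 -7
}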
+
+const predeclReserved = 32
+
+type itag uint64
+
+const (
+ // Types
+ definedType itag = iota
+ pointerType
+ sliceType
+ arrayType
+ chanType
+ mapType
+ signatureType
+ structType
+ interfaceType
+)
+
+// IImportData imports a package from the serialized package data
+// and returns the number of bytes consumed and a reference to the package.
+// If the export data version is not recognized or the format is otherwise
+// compromised, an error is returned.
+func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ const currentVersion = 0
+ version := -1
+ defer func() {
+ if e := recover(); e != nil {
+ if version > currentVersion {
+ err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
+ } else {
+ err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
+ }
+ }
+ }()
+
+ r := &intReader{bytes.NewReader(data), path}
+
+ version = int(r.uint64())
+ switch version {
+ case currentVersion:
+ default:
+ errorf("unknown iexport format version %d", version)
+ }
+
+ sLen := int64(r.uint64())
+ dLen := int64(r.uint64())
+
+ whence, _ := r.Seek(0, io.SeekCurrent)
+ stringData := data[whence : whence+sLen]
+ declData := data[whence+sLen : whence+sLen+dLen]
+ r.Seek(sLen+dLen, io.SeekCurrent)
+
+ p := iimporter{
+ ipath: path,
+
+ stringData: stringData,
+ stringCache: make(map[uint64]string),
+ pkgCache: make(map[uint64]*types.Package),
+
+ declData: declData,
+ pkgIndex: make(map[*types.Package]map[string]uint64),
+ typCache: make(map[uint64]types.Type),
+
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*token.File),
+ },
+ }
+
+ for i, pt := range predeclared {
+ p.typCache[uint64(i)] = pt
+ }
+
+ pkgList := make([]*types.Package, r.uint64())
+ for i := range pkgList {
+ pkgPathOff := r.uint64()
+ pkgPath := p.stringAt(pkgPathOff)
+ pkgName := p.stringAt(r.uint64())
+ _ = r.uint64() // package height; unused by go/types
+
+ if pkgPath == "" {
+ pkgPath = path
+ }
+ pkg := imports[pkgPath]
+ if pkg == nil {
+ pkg = types.NewPackage(pkgPath, pkgName)
+ imports[pkgPath] = pkg
+ } else if pkg.Name() != pkgName {
+ errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
+ }
+
+ p.pkgCache[pkgPathOff] = pkg
+
+ nameIndex := make(map[string]uint64)
+ for nSyms := r.uint64(); nSyms > 0; nSyms-- {
+ name := p.stringAt(r.uint64())
+ nameIndex[name] = r.uint64()
+ }
+
+ p.pkgIndex[pkg] = nameIndex
+ pkgList[i] = pkg
+ }
+
+ localpkg := pkgList[0]
+
+ names := make([]string, 0, len(p.pkgIndex[localpkg]))
+ for name := range p.pkgIndex[localpkg] {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ p.doDecl(localpkg, name)
+ }
+
+ for _, typ := range p.interfaceList {
+ typ.Complete()
+ }
+
+ // record all referenced packages as imports
+ list := append(([]*types.Package)(nil), pkgList[1:]...)
+ sort.Sort(byPath(list))
+ localpkg.SetImports(list)
+
+ // package was imported completely and without errors
+ localpkg.MarkComplete()
+
+ consumed, _ := r.Seek(0, io.SeekCurrent)
+ return int(consumed), localpkg, nil
+}
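
A minimal, hypothetical driver for IImportData inside this package might look as follows; obtaining data (the raw indexed payload with any archive and export-data headers already stripped) is assumed and not shown:

// loadIndexed is a hypothetical helper showing how IImportData is driven.
// data must already be the bare indexed payload; the caller strips headers.
func loadIndexed(fset *token.FileSet, data []byte, path string) (*types.Package, error) {
	imports := make(map[string]*types.Package)
	_, pkg, err := IImportData(fset, imports, data, path)
	return pkg, err
}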
+
+type iimporter struct {
+ ipath string
+
+ stringData []byte
+ stringCache map[uint64]string
+ pkgCache map[uint64]*types.Package
+
+ declData []byte
+ pkgIndex map[*types.Package]map[string]uint64
+ typCache map[uint64]types.Type
+
+ fake fakeFileSet
+ interfaceList []*types.Interface
+}
+
+func (p *iimporter) doDecl(pkg *types.Package, name string) {
+ // See if we've already imported this declaration.
+ if obj := pkg.Scope().Lookup(name); obj != nil {
+ return
+ }
+
+ off, ok := p.pkgIndex[pkg][name]
+ if !ok {
+ errorf("%v.%v not in index", pkg, name)
+ }
+
+ r := &importReader{p: p, currPkg: pkg}
+ r.declReader.Reset(p.declData[off:])
+
+ r.obj(name)
+}
+
+func (p *iimporter) stringAt(off uint64) string {
+ if s, ok := p.stringCache[off]; ok {
+ return s
+ }
+
+ slen, n := binary.Uvarint(p.stringData[off:])
+ if n <= 0 {
+ errorf("varint failed")
+ }
+ spos := off + uint64(n)
+ s := string(p.stringData[spos : spos+slen])
+ p.stringCache[off] = s
+ return s
+}
+
+func (p *iimporter) pkgAt(off uint64) *types.Package {
+ if pkg, ok := p.pkgCache[off]; ok {
+ return pkg
+ }
+ path := p.stringAt(off)
+ errorf("missing package %q in %q", path, p.ipath)
+ return nil
+}
+
+func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
+ if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
+ return t
+ }
+
+ if off < predeclReserved {
+ errorf("predeclared type missing from cache: %v", off)
+ }
+
+ r := &importReader{p: p}
+ r.declReader.Reset(p.declData[off-predeclReserved:])
+ t := r.doType(base)
+
+ if base == nil || !isInterface(t) {
+ p.typCache[off] = t
+ }
+ return t
+}
+
+type importReader struct {
+ p *iimporter
+ declReader bytes.Reader
+ currPkg *types.Package
+ prevFile string
+ prevLine int64
+}
+
+func (r *importReader) obj(name string) {
+ tag := r.byte()
+ pos := r.pos()
+
+ switch tag {
+ case 'A':
+ typ := r.typ()
+
+ r.declare(types.NewTypeName(pos, r.currPkg, name, typ))
+
+ case 'C':
+ typ, val := r.value()
+
+ r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
+
+ case 'F':
+ sig := r.signature(nil)
+
+ r.declare(types.NewFunc(pos, r.currPkg, name, sig))
+
+ case 'T':
+ // Types can be recursive. We need to setup a stub
+ // declaration before recursing.
+ obj := types.NewTypeName(pos, r.currPkg, name, nil)
+ named := types.NewNamed(obj, nil, nil)
+ r.declare(obj)
+
+ underlying := r.p.typAt(r.uint64(), named).Underlying()
+ named.SetUnderlying(underlying)
+
+ if !isInterface(underlying) {
+ for n := r.uint64(); n > 0; n-- {
+ mpos := r.pos()
+ mname := r.ident()
+ recv := r.param()
+ msig := r.signature(recv)
+
+ named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
+ }
+ }
+
+ case 'V':
+ typ := r.typ()
+
+ r.declare(types.NewVar(pos, r.currPkg, name, typ))
+
+ default:
+ errorf("unexpected tag: %v", tag)
+ }
+}
+
+func (r *importReader) declare(obj types.Object) {
+ obj.Pkg().Scope().Insert(obj)
+}
+
+func (r *importReader) value() (typ types.Type, val constant.Value) {
+ typ = r.typ()
+
+ switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
+ case types.IsBoolean:
+ val = constant.MakeBool(r.bool())
+
+ case types.IsString:
+ val = constant.MakeString(r.string())
+
+ case types.IsInteger:
+ val = r.mpint(b)
+
+ case types.IsFloat:
+ val = r.mpfloat(b)
+
+ case types.IsComplex:
+ re := r.mpfloat(b)
+ im := r.mpfloat(b)
+ val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
+
+ default:
+ errorf("unexpected type %v", typ) // panics
+ panic("unreachable")
+ }
+
+ return
+}
+
+func intSize(b *types.Basic) (signed bool, maxBytes uint) {
+ if (b.Info() & types.IsUntyped) != 0 {
+ return true, 64
+ }
+
+ switch b.Kind() {
+ case types.Float32, types.Complex64:
+ return true, 3
+ case types.Float64, types.Complex128:
+ return true, 7
+ }
+
+ signed = (b.Info() & types.IsUnsigned) == 0
+ switch b.Kind() {
+ case types.Int8, types.Uint8:
+ maxBytes = 1
+ case types.Int16, types.Uint16:
+ maxBytes = 2
+ case types.Int32, types.Uint32:
+ maxBytes = 4
+ default:
+ maxBytes = 8
+ }
+
+ return
+}
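
The 3- and 7-byte limits above presumably reflect the IEEE-754 significand widths: a float32 mantissa needs at most 24 bits and a float64 mantissa at most 53 bits. A trivial check:

package main

import "fmt"

func main() {
	fmt.Println(3*8 >= 24) // true: a float32 significand (24 bits) fits in 3 bytes
	fmt.Println(7*8 >= 53) // true: a float64 significand (53 bits) fits in 7 bytes
}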
+
+func (r *importReader) mpint(b *types.Basic) constant.Value {
+ signed, maxBytes := intSize(b)
+
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ n, _ := r.declReader.ReadByte()
+ if uint(n) < maxSmall {
+ v := int64(n)
+ if signed {
+ v >>= 1
+ if n&1 != 0 {
+ v = ^v
+ }
+ }
+ return constant.MakeInt64(v)
+ }
+
+ v := -n
+ if signed {
+ v = -(n &^ 1) >> 1
+ }
+ if v < 1 || uint(v) > maxBytes {
+ errorf("weird decoding: %v, %v => %v", n, signed, v)
+ }
+
+ buf := make([]byte, v)
+ io.ReadFull(&r.declReader, buf)
+
+ // convert to little endian
+ // TODO(gri) go/constant should have a more direct conversion function
+ // (e.g., once it supports a big.Float based implementation)
+ for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
+ buf[i], buf[j] = buf[j], buf[i]
+ }
+
+ x := constant.MakeFromBytes(buf)
+ if signed && n&1 != 0 {
+ x = constant.UnaryOp(token.SUB, x, 0)
+ }
+ return x
+}
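
The small-value branch above is ordinary zig-zag decoding (even bytes encode non-negative values, odd bytes negative ones); a standalone sketch of the same transform:

package main

import "fmt"

// zigzagDecode mirrors the small-value branch of mpint above.
func zigzagDecode(n byte) int64 {
	v := int64(n) >> 1
	if n&1 != 0 {
		v = ^v
	}
	return v
}

func main() {
	for _, n := range []byte{0, 1, 2, 3, 4, 5} {
		fmt.Printf("%d -> %d\n", n, zigzagDecode(n))
	}
	// Prints: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, 4 -> 2, 5 -> -3 (one per line).
}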
+
+func (r *importReader) mpfloat(b *types.Basic) constant.Value {
+ x := r.mpint(b)
+ if constant.Sign(x) == 0 {
+ return x
+ }
+
+ exp := r.int64()
+ switch {
+ case exp > 0:
+ x = constant.Shift(x, token.SHL, uint(exp))
+ case exp < 0:
+ d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
+ x = constant.BinaryOp(x, token.QUO, d)
+ }
+ return x
+}
+
+func (r *importReader) ident() string {
+ return r.string()
+}
+
+func (r *importReader) qualifiedIdent() (*types.Package, string) {
+ name := r.string()
+ pkg := r.pkg()
+ return pkg, name
+}
+
+func (r *importReader) pos() token.Pos {
+ delta := r.int64()
+ if delta != deltaNewFile {
+ r.prevLine += delta
+ } else if l := r.int64(); l == -1 {
+ r.prevLine += deltaNewFile
+ } else {
+ r.prevFile = r.string()
+ r.prevLine = l
+ }
+
+ if r.prevFile == "" && r.prevLine == 0 {
+ return token.NoPos
+ }
+
+ return r.p.fake.pos(r.prevFile, int(r.prevLine))
+}
+
+func (r *importReader) typ() types.Type {
+ return r.p.typAt(r.uint64(), nil)
+}
+
+func isInterface(t types.Type) bool {
+ _, ok := t.(*types.Interface)
+ return ok
+}
+
+func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
+func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
+
+func (r *importReader) doType(base *types.Named) types.Type {
+ switch k := r.kind(); k {
+ default:
+ errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
+ return nil
+
+ case definedType:
+ pkg, name := r.qualifiedIdent()
+ r.p.doDecl(pkg, name)
+ return pkg.Scope().Lookup(name).(*types.TypeName).Type()
+ case pointerType:
+ return types.NewPointer(r.typ())
+ case sliceType:
+ return types.NewSlice(r.typ())
+ case arrayType:
+ n := r.uint64()
+ return types.NewArray(r.typ(), int64(n))
+ case chanType:
+ dir := chanDir(int(r.uint64()))
+ return types.NewChan(dir, r.typ())
+ case mapType:
+ return types.NewMap(r.typ(), r.typ())
+ case signatureType:
+ r.currPkg = r.pkg()
+ return r.signature(nil)
+
+ case structType:
+ r.currPkg = r.pkg()
+
+ fields := make([]*types.Var, r.uint64())
+ tags := make([]string, len(fields))
+ for i := range fields {
+ fpos := r.pos()
+ fname := r.ident()
+ ftyp := r.typ()
+ emb := r.bool()
+ tag := r.string()
+
+ fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
+ tags[i] = tag
+ }
+ return types.NewStruct(fields, tags)
+
+ case interfaceType:
+ r.currPkg = r.pkg()
+
+ embeddeds := make([]types.Type, r.uint64())
+ for i := range embeddeds {
+ _ = r.pos()
+ embeddeds[i] = r.typ()
+ }
+
+ methods := make([]*types.Func, r.uint64())
+ for i := range methods {
+ mpos := r.pos()
+ mname := r.ident()
+
+ // TODO(mdempsky): Matches bimport.go, but I
+ // don't agree with this.
+ var recv *types.Var
+ if base != nil {
+ recv = types.NewVar(token.NoPos, r.currPkg, "", base)
+ }
+
+ msig := r.signature(recv)
+ methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
+ }
+
+ typ := newInterface(methods, embeddeds)
+ r.p.interfaceList = append(r.p.interfaceList, typ)
+ return typ
+ }
+}
+
+func (r *importReader) kind() itag {
+ return itag(r.uint64())
+}
+
+func (r *importReader) signature(recv *types.Var) *types.Signature {
+ params := r.paramList()
+ results := r.paramList()
+ variadic := params.Len() > 0 && r.bool()
+ return types.NewSignature(recv, params, results, variadic)
+}
+
+func (r *importReader) paramList() *types.Tuple {
+ xs := make([]*types.Var, r.uint64())
+ for i := range xs {
+ xs[i] = r.param()
+ }
+ return types.NewTuple(xs...)
+}
+
+func (r *importReader) param() *types.Var {
+ pos := r.pos()
+ name := r.ident()
+ typ := r.typ()
+ return types.NewParam(pos, r.currPkg, name, typ)
+}
+
+func (r *importReader) bool() bool {
+ return r.uint64() != 0
+}
+
+func (r *importReader) int64() int64 {
+ n, err := binary.ReadVarint(&r.declReader)
+ if err != nil {
+ errorf("readVarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) uint64() uint64 {
+ n, err := binary.ReadUvarint(&r.declReader)
+ if err != nil {
+ errorf("readUvarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) byte() byte {
+ x, err := r.declReader.ReadByte()
+ if err != nil {
+ errorf("declReader.ReadByte: %v", err)
+ }
+ return x
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias18.go b/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias18.go
new file mode 100644
index 00000000..225ffeed
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias18.go
@@ -0,0 +1,13 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !go1.9
+
+package gcimporter
+
+import "go/types"
+
+func isAlias(obj *types.TypeName) bool {
+ return false // there are no type aliases before Go 1.9
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias19.go b/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias19.go
new file mode 100644
index 00000000..c2025d84
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias19.go
@@ -0,0 +1,13 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.9
+
+package gcimporter
+
+import "go/types"
+
+func isAlias(obj *types.TypeName) bool {
+ return obj.IsAlias()
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
new file mode 100644
index 00000000..463f2522
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
@@ -0,0 +1,21 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !go1.11
+
+package gcimporter
+
+import "go/types"
+
+func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
+ named := make([]*types.Named, len(embeddeds))
+ for i, e := range embeddeds {
+ var ok bool
+ named[i], ok = e.(*types.Named)
+ if !ok {
+ panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
+ }
+ }
+ return types.NewInterface(methods, named)
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
new file mode 100644
index 00000000..ab28b95c
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
@@ -0,0 +1,13 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build go1.11
+
+package gcimporter
+
+import "go/types"
+
+func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
+ return types.NewInterfaceType(methods, embeddeds)
+}
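
For reference, a minimal sketch of the Go 1.11+ constructor that the shim above forwards to; the package, method, and signature used here are hypothetical:

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.com/demo", "demo") // hypothetical package
	sig := types.NewSignature(nil, nil, nil, false)     // func()
	m := types.NewFunc(token.NoPos, pkg, "Close", sig)

	iface := types.NewInterfaceType([]*types.Func{m}, nil)
	iface.Complete()   // must be called before the interface is used
	fmt.Println(iface) // e.g. interface{Close()}
}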
diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go
deleted file mode 100644
index 9b51c9ec..00000000
--- a/vendor/golang.org/x/tools/go/loader/doc.go
+++ /dev/null
@@ -1,205 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package loader loads a complete Go program from source code, parsing
-// and type-checking the initial packages plus their transitive closure
-// of dependencies. The ASTs and the derived facts are retained for
-// later use.
-//
-// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE.
-//
-// The package defines two primary types: Config, which specifies a
-// set of initial packages to load and various other options; and
-// Program, which is the result of successfully loading the packages
-// specified by a configuration.
-//
-// The configuration can be set directly, but *Config provides various
-// convenience methods to simplify the common cases, each of which can
-// be called any number of times. Finally, these are followed by a
-// call to Load() to actually load and type-check the program.
-//
-// var conf loader.Config
-//
-// // Use the command-line arguments to specify
-// // a set of initial packages to load from source.
-// // See FromArgsUsage for help.
-// rest, err := conf.FromArgs(os.Args[1:], wantTests)
-//
-// // Parse the specified files and create an ad hoc package with path "foo".
-// // All files must have the same 'package' declaration.
-// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
-//
-// // Create an ad hoc package with path "foo" from
-// // the specified already-parsed files.
-// // All ASTs must have the same 'package' declaration.
-// conf.CreateFromFiles("foo", parsedFiles)
-//
-// // Add "runtime" to the set of packages to be loaded.
-// conf.Import("runtime")
-//
-// // Adds "fmt" and "fmt_test" to the set of packages
-// // to be loaded. "fmt" will include *_test.go files.
-// conf.ImportWithTests("fmt")
-//
-// // Finally, load all the packages specified by the configuration.
-// prog, err := conf.Load()
-//
-// See examples_test.go for examples of API usage.
-//
-//
-// CONCEPTS AND TERMINOLOGY
-//
-// The WORKSPACE is the set of packages accessible to the loader. The
-// workspace is defined by Config.Build, a *build.Context. The
-// default context treats subdirectories of $GOROOT and $GOPATH as
-// packages, but this behavior may be overridden.
-//
-// An AD HOC package is one specified as a set of source files on the
-// command line. In the simplest case, it may consist of a single file
-// such as $GOROOT/src/net/http/triv.go.
-//
-// EXTERNAL TEST packages are those comprised of a set of *_test.go
-// files all with the same 'package foo_test' declaration, all in the
-// same directory. (go/build.Package calls these files XTestFiles.)
-//
-// An IMPORTABLE package is one that can be referred to by some import
-// spec. Every importable package is uniquely identified by its
-// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json",
-// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path
-// typically denotes a subdirectory of the workspace.
-//
-// An import declaration uses an IMPORT PATH to refer to a package.
-// Most import declarations use the package path as the import path.
-//
-// Due to VENDORING (https://golang.org/s/go15vendor), the
-// interpretation of an import path may depend on the directory in which
-// it appears. To resolve an import path to a package path, go/build
-// must search the enclosing directories for a subdirectory named
-// "vendor".
-//
-// ad hoc packages and external test packages are NON-IMPORTABLE. The
-// path of an ad hoc package is inferred from the package
-// declarations of its files and is therefore not a unique package key.
-// For example, Config.CreatePkgs may specify two initial ad hoc
-// packages, both with path "main".
-//
-// An AUGMENTED package is an importable package P plus all the
-// *_test.go files with same 'package foo' declaration as P.
-// (go/build.Package calls these files TestFiles.)
-//
-// The INITIAL packages are those specified in the configuration. A
-// DEPENDENCY is a package loaded to satisfy an import in an initial
-// package or another dependency.
-//
-package loader
-
-// IMPLEMENTATION NOTES
-//
-// 'go test', in-package test files, and import cycles
-// ---------------------------------------------------
-//
-// An external test package may depend upon members of the augmented
-// package that are not in the unaugmented package, such as functions
-// that expose internals. (See bufio/export_test.go for an example.)
-// So, the loader must ensure that for each external test package
-// it loads, it also augments the corresponding non-test package.
-//
-// The import graph over n unaugmented packages must be acyclic; the
-// import graph over n-1 unaugmented packages plus one augmented
-// package must also be acyclic. ('go test' relies on this.) But the
-// import graph over n augmented packages may contain cycles.
-//
-// First, all the (unaugmented) non-test packages and their
-// dependencies are imported in the usual way; the loader reports an
-// error if it detects an import cycle.
-//
-// Then, each package P for which testing is desired is augmented by
-// the list P' of its in-package test files, by calling
-// (*types.Checker).Files. This arrangement ensures that P' may
-// reference definitions within P, but P may not reference definitions
-// within P'. Furthermore, P' may import any other package, including
-// ones that depend upon P, without an import cycle error.
-//
-// Consider two packages A and B, both of which have lists of
-// in-package test files we'll call A' and B', and which have the
-// following import graph edges:
-// B imports A
-// B' imports A
-// A' imports B
-// This last edge would be expected to create an error were it not
-// for the special type-checking discipline above.
-// Cycles of size greater than two are possible. For example:
-// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil"
-// io/ioutil/tempfile_test.go (package ioutil) imports "regexp"
-// regexp/exec_test.go (package regexp) imports "compress/bzip2"
-//
-//
-// Concurrency
-// -----------
-//
-// Let us define the import dependency graph as follows. Each node is a
-// list of files passed to (Checker).Files at once. Many of these lists
-// are the production code of an importable Go package, so those nodes
-// are labelled by the package's path. The remaining nodes are
-// ad hoc packages and lists of in-package *_test.go files that augment
-// an importable package; those nodes have no label.
-//
-// The edges of the graph represent import statements appearing within a
-// file. An edge connects a node (a list of files) to the node it
-// imports, which is importable and thus always labelled.
-//
-// Loading is controlled by this dependency graph.
-//
-// To reduce I/O latency, we start loading a package's dependencies
-// asynchronously as soon as we've parsed its files and enumerated its
-// imports (scanImports). This performs a preorder traversal of the
-// import dependency graph.
-//
-// To exploit hardware parallelism, we type-check unrelated packages in
-// parallel, where "unrelated" means not ordered by the partial order of
-// the import dependency graph.
-//
-// We use a concurrency-safe non-blocking cache (importer.imported) to
-// record the results of type-checking, whether success or failure. An
-// entry is created in this cache by startLoad the first time the
-// package is imported. The first goroutine to request an entry becomes
-// responsible for completing the task and broadcasting completion to
-// subsequent requestors, which block until then.
-//
-// Type checking occurs in (parallel) postorder: we cannot type-check a
-// set of files until we have loaded and type-checked all of their
-// immediate dependencies (and thus all of their transitive
-// dependencies). If the input were guaranteed free of import cycles,
-// this would be trivial: we could simply wait for completion of the
-// dependencies and then invoke the typechecker.
-//
-// But as we saw in the 'go test' section above, some cycles in the
-// import graph over packages are actually legal, so long as the
-// cycle-forming edge originates in the in-package test files that
-// augment the package. This explains why the nodes of the import
-// dependency graph are not packages, but lists of files: the unlabelled
-// nodes avoid the cycles. Consider packages A and B where B imports A
-// and A's in-package tests AT import B. The naively constructed import
-// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but
-// the graph over lists of files is AT --> B --> A, where AT is an
-// unlabelled node.
-//
-// Awaiting completion of the dependencies in a cyclic graph would
-// deadlock, so we must materialize the import dependency graph (as
-// importer.graph) and check whether each import edge forms a cycle. If
-// x imports y, and the graph already contains a path from y to x, then
-// there is an import cycle, in which case the processing of x must not
-// wait for the completion of processing of y.
-//
-// When the type-checker makes a callback (doImport) to the loader for a
-// given import edge, there are two possible cases. In the normal case,
-// the dependency has already been completely type-checked; doImport
-// does a cache lookup and returns it. In the cyclic case, the entry in
-// the cache is still necessarily incomplete, indicating a cycle. We
-// perform the cycle check again to obtain the error message, and return
-// the error.
-//
-// The result of using concurrency is about a 2.5x speedup for stdlib_test.
-
-// TODO(adonovan): overhaul the package documentation.
diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go
deleted file mode 100644
index c4566611..00000000
--- a/vendor/golang.org/x/tools/go/loader/loader.go
+++ /dev/null
@@ -1,1078 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package loader
-
-// See doc.go for package documentation and implementation notes.
-
-import (
- "errors"
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "go/types"
- "os"
- "path/filepath"
- "sort"
- "strings"
- "sync"
- "time"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/go/internal/cgo"
-)
-
-var ignoreVendor build.ImportMode
-
-const trace = false // show timing info for type-checking
-
-// Config specifies the configuration for loading a whole program from
-// Go source code.
-// The zero value for Config is a ready-to-use default configuration.
-type Config struct {
- // Fset is the file set for the parser to use when loading the
- // program. If nil, it may be lazily initialized by any
- // method of Config.
- Fset *token.FileSet
-
- // ParserMode specifies the mode to be used by the parser when
- // loading source packages.
- ParserMode parser.Mode
-
- // TypeChecker contains options relating to the type checker.
- //
- // The supplied IgnoreFuncBodies is not used; the effective
- // value comes from the TypeCheckFuncBodies func below.
- // The supplied Import function is not used either.
- TypeChecker types.Config
-
- // TypeCheckFuncBodies is a predicate over package paths.
- // A package for which the predicate is false will
- // have its package-level declarations type checked, but not
- // its function bodies; this can be used to quickly load
- // dependencies from source. If nil, all func bodies are type
- // checked.
- TypeCheckFuncBodies func(path string) bool
-
- // If Build is non-nil, it is used to locate source packages.
- // Otherwise &build.Default is used.
- //
- // By default, cgo is invoked to preprocess Go files that
- // import the fake package "C". This behaviour can be
- // disabled by setting CGO_ENABLED=0 in the environment prior
- // to startup, or by setting Build.CgoEnabled=false.
- Build *build.Context
-
- // The current directory, used for resolving relative package
- // references such as "./go/loader". If empty, os.Getwd will be
- // used instead.
- Cwd string
-
- // If DisplayPath is non-nil, it is used to transform each
- // file name obtained from Build.Import(). This can be used
- // to prevent a virtualized build.Config's file names from
- // leaking into the user interface.
- DisplayPath func(path string) string
-
- // If AllowErrors is true, Load will return a Program even
- // if some of its packages contained I/O, parser or type
- // errors; such errors are accessible via PackageInfo.Errors. If
- // false, Load will fail if any package had an error.
- AllowErrors bool
-
- // CreatePkgs specifies a list of non-importable initial
- // packages to create. The resulting packages will appear in
- // the corresponding elements of the Program.Created slice.
- CreatePkgs []PkgSpec
-
- // ImportPkgs specifies a set of initial packages to load.
- // The map keys are package paths.
- //
- // The map value indicates whether to load tests. If true, Load
- // will add and type-check two lists of files to the package:
- // non-test files followed by in-package *_test.go files. In
- // addition, it will append the external test package (if any)
- // to Program.Created.
- ImportPkgs map[string]bool
-
- // FindPackage is called during Load to create the build.Package
- // for a given import path from a given directory.
- // If FindPackage is nil, (*build.Context).Import is used.
- // A client may use this hook to adapt to a proprietary build
- // system that does not follow the "go build" layout
- // conventions, for example.
- //
- // It must be safe to call concurrently from multiple goroutines.
- FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error)
-
- // AfterTypeCheck is called immediately after a list of files
- // has been type-checked and appended to info.Files.
- //
- // This optional hook function is the earliest opportunity for
- // the client to observe the output of the type checker,
- // which may be useful to reduce analysis latency when loading
- // a large program.
- //
- // The function is permitted to modify info.Info, for instance
- // to clear data structures that are no longer needed, which can
- // dramatically reduce peak memory consumption.
- //
- // The function may be called twice for the same PackageInfo:
- // once for the files of the package and again for the
- // in-package test files.
- //
- // It must be safe to call concurrently from multiple goroutines.
- AfterTypeCheck func(info *PackageInfo, files []*ast.File)
-}
-
-// A PkgSpec specifies a non-importable package to be created by Load.
-// Files are processed first, but typically only one of Files and
-// Filenames is provided. The path needn't be globally unique.
-//
-// For vendoring purposes, the package's directory is the one that
-// contains the first file.
-type PkgSpec struct {
- Path string // package path ("" => use package declaration)
- Files []*ast.File // ASTs of already-parsed files
- Filenames []string // names of files to be parsed
-}
-
-// A Program is a Go program loaded from source as specified by a Config.
-type Program struct {
- Fset *token.FileSet // the file set for this program
-
- // Created[i] contains the initial package whose ASTs or
- // filenames were supplied by Config.CreatePkgs[i], followed by
- // the external test package, if any, of each package in
- // Config.ImportPkgs ordered by ImportPath.
- //
- // NOTE: these files must not import "C". Cgo preprocessing is
- // only performed on imported packages, not ad hoc packages.
- //
- // TODO(adonovan): we need to copy and adapt the logic of
- // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make
- // Config.Import and Config.Create methods return the same kind
- // of entity, essentially a build.Package.
- // Perhaps we can even reuse that type directly.
- Created []*PackageInfo
-
- // Imported contains the initially imported packages,
- // as specified by Config.ImportPkgs.
- Imported map[string]*PackageInfo
-
- // AllPackages contains the PackageInfo of every package
- // encountered by Load: all initial packages and all
- // dependencies, including incomplete ones.
- AllPackages map[*types.Package]*PackageInfo
-
- // importMap is the canonical mapping of package paths to
- // packages. It contains all Imported initial packages, but not
- // Created ones, and all imported dependencies.
- importMap map[string]*types.Package
-}
-
-// PackageInfo holds the ASTs and facts derived by the type-checker
-// for a single package.
-//
-// Not mutated once exposed via the API.
-//
-type PackageInfo struct {
- Pkg *types.Package
- Importable bool // true if 'import "Pkg.Path()"' would resolve to this
- TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors
- Files []*ast.File // syntax trees for the package's files
- Errors []error // non-nil if the package had errors
- types.Info // type-checker deductions.
- dir string // package directory
-
- checker *types.Checker // transient type-checker state
- errorFunc func(error)
-}
-
-func (info *PackageInfo) String() string { return info.Pkg.Path() }
-
-func (info *PackageInfo) appendError(err error) {
- if info.errorFunc != nil {
- info.errorFunc(err)
- } else {
- fmt.Fprintln(os.Stderr, err)
- }
- info.Errors = append(info.Errors, err)
-}
-
-func (conf *Config) fset() *token.FileSet {
- if conf.Fset == nil {
- conf.Fset = token.NewFileSet()
- }
- return conf.Fset
-}
-
-// ParseFile is a convenience function (intended for testing) that invokes
-// the parser using the Config's FileSet, which is initialized if nil.
-//
-// src specifies the parser input as a string, []byte, or io.Reader, and
-// filename is its apparent name. If src is nil, the contents of
-// filename are read from the file system.
-//
-func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) {
- // TODO(adonovan): use conf.build() etc like parseFiles does.
- return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode)
-}
-
-// FromArgsUsage is a partial usage message that applications calling
-// FromArgs may wish to include in their -help output.
-const FromArgsUsage = `
-<args> is a list of arguments denoting a set of initial packages.
-It may take one of two forms:
-
-1. A list of *.go source files.
-
- All of the specified files are loaded, parsed and type-checked
- as a single package. All the files must belong to the same directory.
-
-2. A list of import paths, each denoting a package.
-
- The package's directory is found relative to the $GOROOT and
- $GOPATH using similar logic to 'go build', and the *.go files in
- that directory are loaded, parsed and type-checked as a single
- package.
-
- In addition, all *_test.go files in the directory are then loaded
- and parsed. Those files whose package declaration equals that of
- the non-*_test.go files are included in the primary package. Test
- files whose package declaration ends with "_test" are type-checked
- as another package, the 'external' test package, so that a single
- import path may denote two packages. (Whether this behaviour is
- enabled is tool-specific, and may depend on additional flags.)
-
-A '--' argument terminates the list of packages.
-`
-
-// FromArgs interprets args as a set of initial packages to load from
-// source and updates the configuration. It returns the list of
-// unconsumed arguments.
-//
-// It is intended for use in command-line interfaces that require a
-// set of initial packages to be specified; see FromArgsUsage message
-// for details.
-//
-// Only superficial errors are reported at this stage; errors dependent
-// on I/O are detected during Load.
-//
-func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
- var rest []string
- for i, arg := range args {
- if arg == "--" {
- rest = args[i+1:]
- args = args[:i]
- break // consume "--" and return the remaining args
- }
- }
-
- if len(args) > 0 && strings.HasSuffix(args[0], ".go") {
- // Assume args is a list of *.go files
- // denoting a single ad hoc package.
- for _, arg := range args {
- if !strings.HasSuffix(arg, ".go") {
- return nil, fmt.Errorf("named files must be .go files: %s", arg)
- }
- }
- conf.CreateFromFilenames("", args...)
- } else {
- // Assume args are directories each denoting a
- // package and (perhaps) an external test, iff xtest.
- for _, arg := range args {
- if xtest {
- conf.ImportWithTests(arg)
- } else {
- conf.Import(arg)
- }
- }
- }
-
- return rest, nil
-}
-
-// CreateFromFilenames is a convenience function that adds
-// a conf.CreatePkgs entry to create a package of the specified *.go
-// files.
-//
-func (conf *Config) CreateFromFilenames(path string, filenames ...string) {
- conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames})
-}
-
-// CreateFromFiles is a convenience function that adds a conf.CreatePkgs
-// entry to create package of the specified path and parsed files.
-//
-func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
- conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files})
-}
-
-// ImportWithTests is a convenience function that adds path to
-// ImportPkgs, the set of initial source packages located relative to
-// $GOPATH. The package will be augmented by any *_test.go files in
-// its directory that contain a "package x" (not "package x_test")
-// declaration.
-//
-// In addition, if any *_test.go files contain a "package x_test"
-// declaration, an additional package comprising just those files will
-// be added to CreatePkgs.
-//
-func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) }
-
-// Import is a convenience function that adds path to ImportPkgs, the
-// set of initial packages that will be imported from source.
-//
-func (conf *Config) Import(path string) { conf.addImport(path, false) }
-
-func (conf *Config) addImport(path string, tests bool) {
- if path == "C" {
- return // ignore; not a real package
- }
- if conf.ImportPkgs == nil {
- conf.ImportPkgs = make(map[string]bool)
- }
- conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests
-}
-
-// PathEnclosingInterval returns the PackageInfo and ast.Node that
-// contain source interval [start, end), and all the node's ancestors
-// up to the AST root. It searches all ast.Files of all packages in prog.
-// exact is defined as for astutil.PathEnclosingInterval.
-//
-// The zero value is returned if not found.
-//
-func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
- for _, info := range prog.AllPackages {
- for _, f := range info.Files {
- if f.Pos() == token.NoPos {
- // This can happen if the parser saw
- // too many errors and bailed out.
- // (Use parser.AllErrors to prevent that.)
- continue
- }
- if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) {
- continue
- }
- if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
- return info, path, exact
- }
- }
- }
- return nil, nil, false
-}
-
-// InitialPackages returns a new slice containing the set of initial
-// packages (Created + Imported) in unspecified order.
-//
-func (prog *Program) InitialPackages() []*PackageInfo {
- infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported))
- infos = append(infos, prog.Created...)
- for _, info := range prog.Imported {
- infos = append(infos, info)
- }
- return infos
-}
-
-// Package returns the ASTs and results of type checking for the
-// specified package.
-func (prog *Program) Package(path string) *PackageInfo {
- if info, ok := prog.AllPackages[prog.importMap[path]]; ok {
- return info
- }
- for _, info := range prog.Created {
- if path == info.Pkg.Path() {
- return info
- }
- }
- return nil
-}
-
-// ---------- Implementation ----------
-
-// importer holds the working state of the algorithm.
-type importer struct {
- conf *Config // the client configuration
- start time.Time // for logging
-
- progMu sync.Mutex // guards prog
- prog *Program // the resulting program
-
- // findpkg is a memoization of FindPackage.
- findpkgMu sync.Mutex // guards findpkg
- findpkg map[findpkgKey]*findpkgValue
-
- importedMu sync.Mutex // guards imported
- imported map[string]*importInfo // all imported packages (incl. failures) by import path
-
- // import dependency graph: graph[x][y] => x imports y
- //
- // Since non-importable packages cannot be cyclic, we ignore
- // their imports, thus we only need the subgraph over importable
- // packages. Nodes are identified by their import paths.
- graphMu sync.Mutex
- graph map[string]map[string]bool
-}
-
-type findpkgKey struct {
- importPath string
- fromDir string
- mode build.ImportMode
-}
-
-type findpkgValue struct {
- ready chan struct{} // closed to broadcast readiness
- bp *build.Package
- err error
-}
-
-// importInfo tracks the success or failure of a single import.
-//
-// Upon completion, exactly one of info and err is non-nil:
-// info on successful creation of a package, err otherwise.
-// A successful package may still contain type errors.
-//
-type importInfo struct {
- path string // import path
- info *PackageInfo // results of typechecking (including errors)
- complete chan struct{} // closed to broadcast that info is set.
-}
-
-// awaitCompletion blocks until ii is complete,
-// i.e. the info field is safe to inspect.
-func (ii *importInfo) awaitCompletion() {
- <-ii.complete // wait for close
-}
-
-// Complete marks ii as complete.
-// Its info and err fields will not be subsequently updated.
-func (ii *importInfo) Complete(info *PackageInfo) {
- if info == nil {
- panic("info == nil")
- }
- ii.info = info
- close(ii.complete)
-}
-
-type importError struct {
- path string // import path
- err error // reason for failure to create a package
-}
-
-// Load creates the initial packages specified by conf.{Create,Import}Pkgs,
-// loading their dependencies packages as needed.
-//
-// On success, Load returns a Program containing a PackageInfo for
-// each package. On failure, it returns an error.
-//
-// If AllowErrors is true, Load will return a Program even if some
-// packages contained I/O, parser or type errors, or if dependencies
-// were missing. (Such errors are accessible via PackageInfo.Errors.) If
-// false, Load will fail if any package had an error.
-//
-// It is an error if no packages were loaded.
-//
-func (conf *Config) Load() (*Program, error) {
- // Create a simple default error handler for parse/type errors.
- if conf.TypeChecker.Error == nil {
- conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) }
- }
-
- // Set default working directory for relative package references.
- if conf.Cwd == "" {
- var err error
- conf.Cwd, err = os.Getwd()
- if err != nil {
- return nil, err
- }
- }
-
- // Install default FindPackage hook using go/build logic.
- if conf.FindPackage == nil {
- conf.FindPackage = (*build.Context).Import
- }
-
- prog := &Program{
- Fset: conf.fset(),
- Imported: make(map[string]*PackageInfo),
- importMap: make(map[string]*types.Package),
- AllPackages: make(map[*types.Package]*PackageInfo),
- }
-
- imp := importer{
- conf: conf,
- prog: prog,
- findpkg: make(map[findpkgKey]*findpkgValue),
- imported: make(map[string]*importInfo),
- start: time.Now(),
- graph: make(map[string]map[string]bool),
- }
-
- // -- loading proper (concurrent phase) --------------------------------
-
- var errpkgs []string // packages that contained errors
-
- // Load the initially imported packages and their dependencies,
- // in parallel.
- // No vendor check on packages imported from the command line.
- infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor)
- for _, ie := range importErrors {
- conf.TypeChecker.Error(ie.err) // failed to create package
- errpkgs = append(errpkgs, ie.path)
- }
- for _, info := range infos {
- prog.Imported[info.Pkg.Path()] = info
- }
-
- // Augment the designated initial packages by their tests.
- // Dependencies are loaded in parallel.
- var xtestPkgs []*build.Package
- for importPath, augment := range conf.ImportPkgs {
- if !augment {
- continue
- }
-
- // No vendor check on packages imported from command line.
- bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor)
- if err != nil {
- // Package not found, or can't even parse package declaration.
- // Already reported by previous loop; ignore it.
- continue
- }
-
- // Needs external test package?
- if len(bp.XTestGoFiles) > 0 {
- xtestPkgs = append(xtestPkgs, bp)
- }
-
- // Consult the cache using the canonical package path.
- path := bp.ImportPath
- imp.importedMu.Lock() // (unnecessary, we're sequential here)
- ii, ok := imp.imported[path]
- // Paranoid checks added due to issue #11012.
- if !ok {
- // Unreachable.
- // The previous loop called importAll and thus
- // startLoad for each path in ImportPkgs, which
- // populates imp.imported[path] with a non-zero value.
- panic(fmt.Sprintf("imported[%q] not found", path))
- }
- if ii == nil {
- // Unreachable.
- // The ii values in this loop are the same as in
- // the previous loop, which enforced the invariant
- // that at least one of ii.err and ii.info is non-nil.
- panic(fmt.Sprintf("imported[%q] == nil", path))
- }
- if ii.info == nil {
- // Unreachable.
- // awaitCompletion has the postcondition
- // ii.info != nil.
- panic(fmt.Sprintf("imported[%q].info = nil", path))
- }
- info := ii.info
- imp.importedMu.Unlock()
-
- // Parse the in-package test files.
- files, errs := imp.conf.parsePackageFiles(bp, 't')
- for _, err := range errs {
- info.appendError(err)
- }
-
- // The test files augmenting package P cannot be imported,
- // but may import packages that import P,
- // so we must disable the cycle check.
- imp.addFiles(info, files, false)
- }
-
- createPkg := func(path, dir string, files []*ast.File, errs []error) {
- info := imp.newPackageInfo(path, dir)
- for _, err := range errs {
- info.appendError(err)
- }
-
- // Ad hoc packages are non-importable,
- // so no cycle check is needed.
- // addFiles loads dependencies in parallel.
- imp.addFiles(info, files, false)
- prog.Created = append(prog.Created, info)
- }
-
- // Create packages specified by conf.CreatePkgs.
- for _, cp := range conf.CreatePkgs {
- files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode)
- files = append(files, cp.Files...)
-
- path := cp.Path
- if path == "" {
- if len(files) > 0 {
- path = files[0].Name.Name
- } else {
- path = "(unnamed)"
- }
- }
-
- dir := conf.Cwd
- if len(files) > 0 && files[0].Pos().IsValid() {
- dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name())
- }
- createPkg(path, dir, files, errs)
- }
-
- // Create external test packages.
- sort.Sort(byImportPath(xtestPkgs))
- for _, bp := range xtestPkgs {
- files, errs := imp.conf.parsePackageFiles(bp, 'x')
- createPkg(bp.ImportPath+"_test", bp.Dir, files, errs)
- }
-
- // -- finishing up (sequential) ----------------------------------------
-
- if len(prog.Imported)+len(prog.Created) == 0 {
- return nil, errors.New("no initial packages were loaded")
- }
-
- // Create infos for indirectly imported packages.
- // e.g. incomplete packages without syntax, loaded from export data.
- for _, obj := range prog.importMap {
- info := prog.AllPackages[obj]
- if info == nil {
- prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true}
- } else {
- // finished
- info.checker = nil
- info.errorFunc = nil
- }
- }
-
- if !conf.AllowErrors {
- // Report errors in indirectly imported packages.
- for _, info := range prog.AllPackages {
- if len(info.Errors) > 0 {
- errpkgs = append(errpkgs, info.Pkg.Path())
- }
- }
- if errpkgs != nil {
- var more string
- if len(errpkgs) > 3 {
- more = fmt.Sprintf(" and %d more", len(errpkgs)-3)
- errpkgs = errpkgs[:3]
- }
- return nil, fmt.Errorf("couldn't load packages due to errors: %s%s",
- strings.Join(errpkgs, ", "), more)
- }
- }
-
- markErrorFreePackages(prog.AllPackages)
-
- return prog, nil
-}
-
-type byImportPath []*build.Package
-
-func (b byImportPath) Len() int { return len(b) }
-func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath }
-func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
-
-// markErrorFreePackages sets the TransitivelyErrorFree flag on all
-// applicable packages.
-func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) {
- // Build the transpose of the import graph.
- importedBy := make(map[*types.Package]map[*types.Package]bool)
- for P := range allPackages {
- for _, Q := range P.Imports() {
- clients, ok := importedBy[Q]
- if !ok {
- clients = make(map[*types.Package]bool)
- importedBy[Q] = clients
- }
- clients[P] = true
- }
- }
-
- // Find all packages reachable from some error package.
- reachable := make(map[*types.Package]bool)
- var visit func(*types.Package)
- visit = func(p *types.Package) {
- if !reachable[p] {
- reachable[p] = true
- for q := range importedBy[p] {
- visit(q)
- }
- }
- }
- for _, info := range allPackages {
- if len(info.Errors) > 0 {
- visit(info.Pkg)
- }
- }
-
- // Mark the others as "transitively error-free".
- for _, info := range allPackages {
- if !reachable[info.Pkg] {
- info.TransitivelyErrorFree = true
- }
- }
-}
-
-// build returns the effective build context.
-func (conf *Config) build() *build.Context {
- if conf.Build != nil {
- return conf.Build
- }
- return &build.Default
-}
-
-// parsePackageFiles enumerates the files belonging to package path,
-// then loads, parses and returns them, plus a list of I/O or parse
-// errors that were encountered.
-//
-// 'which' indicates which files to include:
-// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
-// 't': include in-package *_test.go source files (TestGoFiles)
-// 'x': include external *_test.go source files. (XTestGoFiles)
-//
-func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) {
- if bp.ImportPath == "unsafe" {
- return nil, nil
- }
- var filenames []string
- switch which {
- case 'g':
- filenames = bp.GoFiles
- case 't':
- filenames = bp.TestGoFiles
- case 'x':
- filenames = bp.XTestGoFiles
- default:
- panic(which)
- }
-
- files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode)
-
- // Preprocess CgoFiles and parse the outputs (sequentially).
- if which == 'g' && bp.CgoFiles != nil {
- cgofiles, err := cgo.ProcessFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode)
- if err != nil {
- errs = append(errs, err)
- } else {
- files = append(files, cgofiles...)
- }
- }
-
- return files, errs
-}
-
-// doImport imports the package denoted by path.
-// It implements the types.Importer signature.
-//
-// It returns an error if a package could not be created
-// (e.g. go/build or parse error), but type errors are reported via
-// the types.Config.Error callback (the first of which is also saved
-// in the package's PackageInfo).
-//
-// Idempotent.
-//
-func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
- if to == "C" {
- // This should be unreachable, but ad hoc packages are
- // not currently subject to cgo preprocessing.
- // See https://github.com/golang/go/issues/11627.
- return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`,
- from.Pkg.Path())
- }
-
- bp, err := imp.findPackage(to, from.dir, 0)
- if err != nil {
- return nil, err
- }
-
- // The standard unsafe package is handled specially,
- // and has no PackageInfo.
- if bp.ImportPath == "unsafe" {
- return types.Unsafe, nil
- }
-
- // Look for the package in the cache using its canonical path.
- path := bp.ImportPath
- imp.importedMu.Lock()
- ii := imp.imported[path]
- imp.importedMu.Unlock()
- if ii == nil {
- panic("internal error: unexpected import: " + path)
- }
- if ii.info != nil {
- return ii.info.Pkg, nil
- }
-
- // Import of incomplete package: this indicates a cycle.
- fromPath := from.Pkg.Path()
- if cycle := imp.findPath(path, fromPath); cycle != nil {
- cycle = append([]string{fromPath}, cycle...)
- return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
- }
-
- panic("internal error: import of incomplete (yet acyclic) package: " + fromPath)
-}
-
-// findPackage locates the package denoted by the importPath in the
-// specified directory.
-func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) {
- // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7)
- // to avoid holding the lock around FindPackage.
- key := findpkgKey{importPath, fromDir, mode}
- imp.findpkgMu.Lock()
- v, ok := imp.findpkg[key]
- if ok {
- // cache hit
- imp.findpkgMu.Unlock()
-
- <-v.ready // wait for entry to become ready
- } else {
- // Cache miss: this goroutine becomes responsible for
- // populating the map entry and broadcasting its readiness.
- v = &findpkgValue{ready: make(chan struct{})}
- imp.findpkg[key] = v
- imp.findpkgMu.Unlock()
-
- ioLimit <- true
- v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode)
- <-ioLimit
-
- if _, ok := v.err.(*build.NoGoError); ok {
- v.err = nil // empty directory is not an error
- }
-
- close(v.ready) // broadcast ready condition
- }
- return v.bp, v.err
-}
-
-// importAll loads, parses, and type-checks the specified packages in
-// parallel and returns their completed importInfos in unspecified order.
-//
-// fromPath is the package path of the importing package, if it is
-// importable, "" otherwise. It is used for cycle detection.
-//
-// fromDir is the directory containing the import declaration that
-// caused these imports.
-//
-func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
- // TODO(adonovan): opt: do the loop in parallel once
- // findPackage is non-blocking.
- var pending []*importInfo
- for importPath := range imports {
- bp, err := imp.findPackage(importPath, fromDir, mode)
- if err != nil {
- errors = append(errors, importError{
- path: importPath,
- err: err,
- })
- continue
- }
- pending = append(pending, imp.startLoad(bp))
- }
-
- if fromPath != "" {
- // We're loading a set of imports.
- //
- // We must record graph edges from the importing package
- // to its dependencies, and check for cycles.
- imp.graphMu.Lock()
- deps, ok := imp.graph[fromPath]
- if !ok {
- deps = make(map[string]bool)
- imp.graph[fromPath] = deps
- }
- for _, ii := range pending {
- deps[ii.path] = true
- }
- imp.graphMu.Unlock()
- }
-
- for _, ii := range pending {
- if fromPath != "" {
- if cycle := imp.findPath(ii.path, fromPath); cycle != nil {
- // Cycle-forming import: we must not await its
- // completion since it would deadlock.
- //
- // We don't record the error in ii since
- // the error is really associated with the
- // cycle-forming edge, not the package itself.
- // (Also it would complicate the
- // invariants of importPath completion.)
- if trace {
- fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle)
- }
- continue
- }
- }
- ii.awaitCompletion()
- infos = append(infos, ii.info)
- }
-
- return infos, errors
-}
-
-// findPath returns an arbitrary path from 'from' to 'to' in the import
-// graph, or nil if there was none.
-func (imp *importer) findPath(from, to string) []string {
- imp.graphMu.Lock()
- defer imp.graphMu.Unlock()
-
- seen := make(map[string]bool)
- var search func(stack []string, importPath string) []string
- search = func(stack []string, importPath string) []string {
- if !seen[importPath] {
- seen[importPath] = true
- stack = append(stack, importPath)
- if importPath == to {
- return stack
- }
- for x := range imp.graph[importPath] {
- if p := search(stack, x); p != nil {
- return p
- }
- }
- }
- return nil
- }
- return search(make([]string, 0, 20), from)
-}
-
-// startLoad initiates the loading, parsing and type-checking of the
-// specified package and its dependencies, if it has not already begun.
-//
-// It returns an importInfo, not necessarily in a completed state. The
-// caller must call awaitCompletion() before accessing its info field.
-//
-// startLoad is concurrency-safe and idempotent.
-//
-func (imp *importer) startLoad(bp *build.Package) *importInfo {
- path := bp.ImportPath
- imp.importedMu.Lock()
- ii, ok := imp.imported[path]
- if !ok {
- ii = &importInfo{path: path, complete: make(chan struct{})}
- imp.imported[path] = ii
- go func() {
- info := imp.load(bp)
- ii.Complete(info)
- }()
- }
- imp.importedMu.Unlock()
-
- return ii
-}
-
-// load implements package loading by parsing Go source files
-// located by go/build.
-func (imp *importer) load(bp *build.Package) *PackageInfo {
- info := imp.newPackageInfo(bp.ImportPath, bp.Dir)
- info.Importable = true
- files, errs := imp.conf.parsePackageFiles(bp, 'g')
- for _, err := range errs {
- info.appendError(err)
- }
-
- imp.addFiles(info, files, true)
-
- imp.progMu.Lock()
- imp.prog.importMap[bp.ImportPath] = info.Pkg
- imp.progMu.Unlock()
-
- return info
-}
-
-// addFiles adds and type-checks the specified files to info, loading
-// their dependencies if needed. The order of files determines the
-// package initialization order. It may be called multiple times on the
-// same package. Errors are appended to the info.Errors field.
-//
-// cycleCheck determines whether the imports within files create
-// dependency edges that should be checked for potential cycles.
-//
-func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
- // Ensure the dependencies are loaded, in parallel.
- var fromPath string
- if cycleCheck {
- fromPath = info.Pkg.Path()
- }
- // TODO(adonovan): opt: make the caller do scanImports.
- // Callers with a build.Package can skip it.
- imp.importAll(fromPath, info.dir, scanImports(files), 0)
-
- if trace {
- fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n",
- time.Since(imp.start), info.Pkg.Path(), len(files))
- }
-
- // Don't call checker.Files on Unsafe, even with zero files,
- // because it would mutate the package, which is a global.
- if info.Pkg == types.Unsafe {
- if len(files) > 0 {
- panic(`"unsafe" package contains unexpected files`)
- }
- } else {
- // Ignore the returned (first) error since we
- // already collect them all in the PackageInfo.
- info.checker.Files(files)
- info.Files = append(info.Files, files...)
- }
-
- if imp.conf.AfterTypeCheck != nil {
- imp.conf.AfterTypeCheck(info, files)
- }
-
- if trace {
- fmt.Fprintf(os.Stderr, "%s: stop %q\n",
- time.Since(imp.start), info.Pkg.Path())
- }
-}
-
-func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
- var pkg *types.Package
- if path == "unsafe" {
- pkg = types.Unsafe
- } else {
- pkg = types.NewPackage(path, "")
- }
- info := &PackageInfo{
- Pkg: pkg,
- Info: types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
- },
- errorFunc: imp.conf.TypeChecker.Error,
- dir: dir,
- }
-
- // Copy the types.Config so we can vary it across PackageInfos.
- tc := imp.conf.TypeChecker
- tc.IgnoreFuncBodies = false
- if f := imp.conf.TypeCheckFuncBodies; f != nil {
- tc.IgnoreFuncBodies = !f(path)
- }
- tc.Importer = closure{imp, info}
- tc.Error = info.appendError // appendError wraps the user's Error function
-
- info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info)
- imp.progMu.Lock()
- imp.prog.AllPackages[pkg] = info
- imp.progMu.Unlock()
- return info
-}
-
-type closure struct {
- imp *importer
- info *PackageInfo
-}
-
-func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) }
diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go
deleted file mode 100644
index 7f38dd74..00000000
--- a/vendor/golang.org/x/tools/go/loader/util.go
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package loader
-
-import (
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "io"
- "os"
- "strconv"
- "sync"
-
- "golang.org/x/tools/go/buildutil"
-)
-
-// We use a counting semaphore to limit
-// the number of parallel I/O calls per process.
-var ioLimit = make(chan bool, 10)
-
-// parseFiles parses the Go source files within directory dir and
-// returns the ASTs of the ones that could be at least partially parsed,
-// along with a list of I/O and parse errors encountered.
-//
-// I/O is done via ctxt, which may specify a virtual file system.
-// displayPath is used to transform the filenames attached to the ASTs.
-//
-func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
- if displayPath == nil {
- displayPath = func(path string) string { return path }
- }
- var wg sync.WaitGroup
- n := len(files)
- parsed := make([]*ast.File, n)
- errors := make([]error, n)
- for i, file := range files {
- if !buildutil.IsAbsPath(ctxt, file) {
- file = buildutil.JoinPath(ctxt, dir, file)
- }
- wg.Add(1)
- go func(i int, file string) {
- ioLimit <- true // wait
- defer func() {
- wg.Done()
- <-ioLimit // signal
- }()
- var rd io.ReadCloser
- var err error
- if ctxt.OpenFile != nil {
- rd, err = ctxt.OpenFile(file)
- } else {
- rd, err = os.Open(file)
- }
- if err != nil {
- errors[i] = err // open failed
- return
- }
-
- // ParseFile may return both an AST and an error.
- parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
- rd.Close()
- }(i, file)
- }
- wg.Wait()
-
- // Eliminate nils, preserving order.
- var o int
- for _, f := range parsed {
- if f != nil {
- parsed[o] = f
- o++
- }
- }
- parsed = parsed[:o]
-
- o = 0
- for _, err := range errors {
- if err != nil {
- errors[o] = err
- o++
- }
- }
- errors = errors[:o]
-
- return parsed, errors
-}
-
-// scanImports returns the set of all import paths from all
-// import specs in the specified files.
-func scanImports(files []*ast.File) map[string]bool {
- imports := make(map[string]bool)
- for _, f := range files {
- for _, decl := range f.Decls {
- if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
- for _, spec := range decl.Specs {
- spec := spec.(*ast.ImportSpec)
-
- // NB: do not assume the program is well-formed!
- path, err := strconv.Unquote(spec.Path.Value)
- if err != nil {
- continue // quietly ignore the error
- }
- if path == "C" {
- continue // skip pseudopackage
- }
- imports[path] = true
- }
- }
- }
- }
- return imports
-}
-
-// ---------- Internal helpers ----------
-
-// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
-func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
- p := int(pos)
- base := f.Base()
- return base <= p && p < base+f.Size()
-}
diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go
new file mode 100644
index 00000000..4f5a1a14
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/doc.go
@@ -0,0 +1,269 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package packages loads Go packages for inspection and analysis.
+
+NOTE: THIS PACKAGE IS NOT YET READY FOR WIDESPREAD USE:
+ - The interface is still being revised and is likely to change.
+ - The implementation depends on the Go 1.11 go command.
+ - We intend to finalize the API before Go 1.11 is released.
+
+The Load function takes as input a list of patterns and returns a list of Package
+structs describing individual packages matched by those patterns.
+The LoadMode controls the amount of detail about the loaded packages.
+
+The patterns are used as arguments to the underlying build tool,
+such as the go command or Bazel, and are interpreted according to
+that tool's conventions.
+
+The Package struct provides basic information about the package, including
+
+ - ID, a unique identifier for the package in the returned set;
+ - GoFiles, the names of the package's Go source files;
+ - Imports, a map from source import strings to the Packages they name;
+ - Types, the type information for the package's exported symbols;
+ - Syntax, the parsed syntax trees for the package's source code; and
+ - TypesInfo, the result of a complete type-check of the package syntax trees.
+
+(See the documentation for type Package for the complete list of fields
+and more detailed descriptions.)
+
+For example,
+
+ Load(nil, "bytes", "unicode...")
+
+returns four Package structs describing the standard library packages
+bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern
+can match multiple packages and that a package might be matched by
+multiple patterns: in general it is not possible to determine which
+packages correspond to which patterns.
+
+Note that the list returned by Load (LoadAllSyntax in this case)
+only contains the packages matched by the patterns. Their dependencies
+can be found by walking the import graph using the Imports fields.
+
+The Load function can be configured by passing a non-nil Config struct as
+the first argument. If you pass nil for the Config, Load will
+run in LoadAllSyntax mode, collecting the maximal amount of information
+it can.
+See the documentation for type Config for details.
+
+As noted earlier, the Config.Mode controls increasing amounts of detail
+about the loaded packages, with each mode returning all the data of the
+previous mode with some extra added. See the documentation for type LoadMode
+for details.
+
+Most tools should pass their command-line arguments (after any flags)
+uninterpreted to the loader, so that the loader can interpret them
+according to the conventions of the underlying build system.
+For example, this program prints the names of the source files
+for each package listed on the command line:
+
+ package main
+
+ import (
+ "flag"
+ "fmt"
+ "log"
+
+ "golang.org/x/tools/go/packages"
+ )
+
+ func main() {
+ flag.Parse()
+ pkgs, err := packages.Load(nil, flag.Args()...)
+ if err != nil {
+ log.Fatal(err)
+ }
+ for _, pkg := range pkgs {
+ fmt.Print(pkg.ID, pkg.GoFiles)
+ }
+ }
+*/
+package packages // import "golang.org/x/tools/go/packages"
+
+/*
+
+Motivation and design considerations
+
+The new package's design solves problems addressed by two existing
+packages: go/build, which locates and describes packages, and
+golang.org/x/tools/go/loader, which loads, parses and type-checks them.
+The go/build.Package structure encodes too much of the 'go build' way
+of organizing projects, leaving us in need of a data type that describes a
+package of Go source code independent of the underlying build system.
+We wanted something that works equally well with go build and vgo, and
+also other build systems such as Bazel and Blaze, making it possible to
+construct analysis tools that work in all these environments.
+Tools such as errcheck and staticcheck were essentially unavailable to
+the Go community at Google, and some of Google's internal tools for Go
+are unavailable externally.
+This new package provides a uniform way to obtain package metadata by
+querying each of these build systems, optionally supporting their
+preferred command-line notations for packages, so that tools integrate
+neatly with users' build environments. The Metadata query function
+executes an external query tool appropriate to the current workspace.
+
+Loading packages always returns the complete import graph "all the way down",
+even if all you want is information about a single package, because the query
+mechanisms of all the build systems we currently support ({go,vgo} list, and
+blaze/bazel aspect-based query) cannot provide detailed information
+about one package without visiting all its dependencies too, so there is
+no additional asymptotic cost to providing transitive information.
+(This property might not be true of a hypothetical 5th build system.)
+
+This package provides no parse-but-don't-typecheck operation because most tools
+that need only untyped syntax (such as gofmt, goimports, and golint)
+seem not to care about any files other than the ones they are directly
+instructed to look at. Also, it is trivial for a client to supplement
+this functionality on top of a Metadata query.
+
+In calls to TypeCheck, all initial packages, and any package that
+transitively depends on one of them, must be loaded from source.
+Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from
+source; D may be loaded from export data, and E may not be loaded at all
+(though it's possible that D's export data mentions it, so a
+types.Package may be created for it and exposed.)
+
+The old loader had a feature to suppress type-checking of function
+bodies on a per-package basis, primarily intended to reduce the work of
+obtaining type information for imported packages. Now that imports are
+satisfied by export data, the optimization no longer seems necessary.
+
+Despite some early attempts, the old loader did not exploit export data,
+instead always using the equivalent of WholeProgram mode. This was due
+to the complexity of mixing source and export data packages (now
+resolved by the upward traversal mentioned above), and because export data
+files were nearly always missing or stale. Now that 'go build' supports
+caching, all the underlying build systems can guarantee to produce
+export data in a reasonable (amortized) time.
+
+Test "main" packages synthesized by the build system are now reported as
+first-class packages, avoiding the need for clients (such as go/ssa) to
+reinvent this generation logic.
+
+One way in which go/packages is simpler than the old loader is in its
+treatment of in-package tests. In-package tests are packages that
+consist of all the files of the library under test, plus the test files.
+The old loader constructed in-package tests by a two-phase process of
+mutation called "augmentation": first it would construct and type check
+all the ordinary library packages and type-check the packages that
+depend on them; then it would add more (test) files to the package and
+type-check again. This two-phase approach had four major problems:
+1) in processing the tests, the loader modified the library package,
+ leaving no way for a client application to see both the test
+ package and the library package; one would mutate into the other.
+2) because test files can declare additional methods on types defined in
+ the library portion of the package, the dispatch of method calls in
+ the library portion was affected by the presence of the test files.
+ This should have been a clue that the packages were logically
+ different.
+3) this model of "augmentation" assumed at most one in-package test
+ per library package, which is true of projects using 'go build',
+ but not other build systems.
+4) because of the two-phase nature of test processing, all packages that
+ import the library package had to be processed before augmentation,
+ forcing a "one-shot" API and preventing the client from calling Load
+ several times in sequence, as is now possible in WholeProgram mode.
+ (TypeCheck mode has a similar one-shot restriction for a different reason.)
+
+Early drafts of this package supported "multi-shot" operation
+in the Metadata and WholeProgram modes, although this feature is not exposed
+through the API and will likely be removed.
+Although it allowed clients to make a sequence of calls (or concurrent
+calls) to Load, building up the graph of Packages incrementally,
+it was of marginal value: it complicated the API
+(since it allowed some options to vary across calls but not others),
+it complicated the implementation,
+it cannot be made to work in TypeCheck mode, as explained above,
+and it was less efficient than making one combined call (when this is possible).
+Among the clients we have inspected, none made multiple calls to load
+but could not be easily and satisfactorily modified to make only a single call.
+However, application changes may be required.
+For example, the ssadump command loads the user-specified packages
+and in addition the runtime package. It is tempting to simply append
+"runtime" to the user-provided list, but that does not work if the user
+specified an ad-hoc package such as [a.go b.go].
+Instead, ssadump no longer requests the runtime package,
+but seeks it among the dependencies of the user-specified packages,
+and emits an error if it is not found.
+
+Overlays: the ParseFile hook in the API permits clients to vary the way
+in which ASTs are obtained from filenames; the default implementation is
+based on parser.ParseFile. This feature enables editor-integrated tools
+that analyze the contents of modified but unsaved buffers: rather than
+read from the file system, a tool can read from an archive of modified
+buffers provided by the editor.
+This approach has its limits. Because package metadata is obtained by
+fork/execing an external query command for each build system, we can
+fake only the file contents seen by the parser, type-checker, and
+application, but not by the metadata query, so, for example:
+- additional imports in the fake file will not be described by the
+ metadata, so the type checker will fail to load imports that create
+ new dependencies.
+- in TypeCheck mode, because export data is produced by the query
+ command, it will not reflect the fake file contents.
+- this mechanism cannot add files to a package without first saving them.
+
+Questions & Tasks
+
+- Add GOARCH/GOOS?
+ They are not portable concepts, but could be made portable.
+ Our goal has been to allow users to express themselves using the conventions
+ of the underlying build system: if the build system honors GOARCH
+ during a build and during a metadata query, then so should
+ applications built atop that query mechanism.
+ Conversely, if the target architecture of the build is determined by
+ command-line flags, the application can pass the relevant
+ flags through to the build system using a command such as:
+ myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin"
+ However, this approach is low-level, unwieldy, and non-portable.
+ GOOS and GOARCH seem important enough to warrant a dedicated option.
+
+- How should we handle partial failures such as a mixture of good and
+ malformed patterns, existing and non-existent packages, successful and
+ failed builds, import failures, import cycles, and so on, in a call to
+ Load?
+
+- Do we need a GeneratedBy map that maps the name of each generated Go
+ source file in GoFiles to that of the original file, if known, or "" otherwise?
+ Or are //line directives and "Generated" comments in those files enough?
+
+- Support bazel, blaze, and go1.10 list, not just go1.11 list.
+
+- Handle (and test) various partial success cases, e.g.
+ a mixture of good packages and:
+ invalid patterns
+ nonexistent packages
+ empty packages
+ packages with malformed package or import declarations
+ unreadable files
+ import cycles
+ other parse errors
+ type errors
+ Make sure we record errors at the correct place in the graph.
+
+- Missing packages among initial arguments are not reported.
+ Return bogus packages for them, like golist does.
+
+- "undeclared name" errors (for example) are reported out of source file
+ order. I suspect this is due to the breadth-first resolution now used
+ by go/types. Is that a bug? Discuss with gri.
+
+- https://github.com/golang/go/issues/25980 causes these commands to crash:
+ $ GOPATH=/none ./gopackages -all all
+ due to:
+ $ GOPATH=/none go list -e -test -json all
+ and:
+ $ go list -e -test ./relative/path
+
+- Modify stringer to use go/packages, perhaps initially under flag control.
+
+- Bug: "gopackages fmt a.go" doesn't produce an error.
+
+- If necessary, add back an IsTest boolean or expose ForTests on the Package struct.
+ IsTest was removed because we couldn't agree on a useful definition.
+
+*/
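As a rough illustration of the usage described in the doc comment above, the sketch below walks the import graph that Load returns when the Imports field is populated. It uses only the API added in this diff; the "./..." pattern and the printed output are assumptions for the example, not part of the change.

    package main

    import (
        "fmt"
        "log"

        "golang.org/x/tools/go/packages"
    )

    func main() {
        // LoadImports populates the Imports field of each package and its dependencies.
        cfg := &packages.Config{Mode: packages.LoadImports}
        roots, err := packages.Load(cfg, "./...")
        if err != nil {
            log.Fatal(err)
        }

        // Load only returns the packages matched by the patterns;
        // dependencies are reached by walking Imports.
        seen := make(map[string]bool)
        var visit func(p *packages.Package)
        visit = func(p *packages.Package) {
            if seen[p.ID] {
                return
            }
            seen[p.ID] = true
            fmt.Println(p.ID)
            for _, imp := range p.Imports {
                visit(imp)
            }
        }
        for _, p := range roots {
            visit(p)
        }
    }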
diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go
new file mode 100644
index 00000000..39e5ed99
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/external.go
@@ -0,0 +1,68 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file enables an external tool to intercept package requests.
+// If the tool is present then its results are used in preference to
+// the go list command.
+
+package packages
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "os/exec"
+ "strings"
+)
+
+// findExternalDriver returns a driver that invokes an external tool supplying
+// the build system package structure, or nil if no such tool is found.
+// If GOPACKAGESDRIVER is set in the environment, its value is used as the tool
+// (the special value "off" disables the mechanism); otherwise the PATH is
+// searched for a binary named gopackagesdriver.
+func findExternalDriver(cfg *Config) driver {
+ const toolPrefix = "GOPACKAGESDRIVER="
+ tool := ""
+ for _, env := range cfg.Env {
+ if val := strings.TrimPrefix(env, toolPrefix); val != env {
+ tool = val
+ }
+ }
+ if tool != "" && tool == "off" {
+ return nil
+ }
+ if tool == "" {
+ var err error
+ tool, err = exec.LookPath("gopackagesdriver")
+ if err != nil {
+ return nil
+ }
+ }
+ return func(cfg *Config, words ...string) (*driverResponse, error) {
+ buf := new(bytes.Buffer)
+ fullargs := []string{
+ "list",
+ fmt.Sprintf("-test=%t", cfg.Tests),
+ fmt.Sprintf("-export=%t", usesExportData(cfg)),
+ fmt.Sprintf("-deps=%t", cfg.Mode >= LoadImports),
+ }
+ for _, f := range cfg.Flags {
+ fullargs = append(fullargs, fmt.Sprintf("-flags=%v", f))
+ }
+ fullargs = append(fullargs, "--")
+ fullargs = append(fullargs, words...)
+ cmd := exec.CommandContext(cfg.Context, tool, fullargs...)
+ cmd.Env = cfg.Env
+ cmd.Dir = cfg.Dir
+ cmd.Stdout = buf
+ cmd.Stderr = new(bytes.Buffer)
+ if err := cmd.Run(); err != nil {
+ return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
+ }
+ var response driverResponse
+ if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
+ return nil, err
+ }
+ return &response, nil
+ }
+}
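A rough sketch of how a client might steer loading to an external driver through the GOPACKAGESDRIVER variable that findExternalDriver above inspects. The driver path and the "fmt" pattern are hypothetical; setting the value to "off" would instead force the go list driver.

    package main

    import (
        "fmt"
        "log"
        "os"

        "golang.org/x/tools/go/packages"
    )

    func main() {
        // findExternalDriver reads GOPACKAGESDRIVER from Config.Env; the last
        // value in the slice for a given key wins, so appending overrides.
        cfg := &packages.Config{
            Mode: packages.LoadImports,
            Env:  append(os.Environ(), "GOPACKAGESDRIVER=/usr/local/bin/gopackagesdriver"),
        }
        pkgs, err := packages.Load(cfg, "fmt")
        if err != nil {
            log.Fatal(err)
        }
        for _, p := range pkgs {
            fmt.Println(p.ID, p.GoFiles)
        }
    }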
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
new file mode 100644
index 00000000..26d62771
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -0,0 +1,337 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+)
+
+// A goTooOldError reports that the go command
+// found by exec.LookPath is too old to use the new go list behavior.
+type goTooOldError struct {
+ error
+}
+
+// goListDriver uses the go list command to interpret the patterns and produce
+// the build system package structure.
+// See driver for more details.
+func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
+ // Determine files requested in contains patterns
+ var containFiles []string
+ restPatterns := make([]string, 0, len(patterns))
+ for _, pattern := range patterns {
+ if strings.HasPrefix(pattern, "contains:") {
+ containFile := strings.TrimPrefix(pattern, "contains:")
+ containFiles = append(containFiles, containFile)
+ } else {
+ restPatterns = append(restPatterns, pattern)
+ }
+ }
+ containFiles = absJoin(cfg.Dir, containFiles)
+ patterns = restPatterns
+
+ // TODO(matloob): Remove the definition of listfunc and just use golistDriverCurrent once go1.12 is released.
+ var listfunc driver
+ listfunc = func(cfg *Config, words ...string) (*driverResponse, error) {
+ response, err := golistDriverCurrent(cfg, words...)
+ if _, ok := err.(goTooOldError); ok {
+ listfunc = golistDriverFallback
+ return listfunc(cfg, words...)
+ }
+ listfunc = golistDriverCurrent
+ return response, err
+ }
+
+ var response *driverResponse
+ var err error
+
+ // see if we have any patterns to pass through to go list.
+ if len(patterns) > 0 {
+ response, err = listfunc(cfg, patterns...)
+ if err != nil {
+ return nil, err
+ }
+ } else {
+ response = &driverResponse{}
+ }
+
+ // Run go list for contains: patterns.
+ seenPkgs := make(map[string]*Package) // for deduplication. different containing queries could produce same packages
+ if len(containFiles) > 0 {
+ for _, pkg := range response.Packages {
+ seenPkgs[pkg.ID] = pkg
+ }
+ }
+ for _, f := range containFiles {
+ // TODO(matloob): Do only one query per directory.
+ fdir := filepath.Dir(f)
+ cfg.Dir = fdir
+ dirResponse, err := listfunc(cfg, ".")
+ if err != nil {
+ return nil, err
+ }
+ isRoot := make(map[string]bool, len(dirResponse.Roots))
+ for _, root := range dirResponse.Roots {
+ isRoot[root] = true
+ }
+ for _, pkg := range dirResponse.Packages {
+ // Add any new packages to the main set
+ // We don't bother to filter packages that will be dropped by the changes of roots,
+ // that will happen anyway during graph construction outside this function.
+ // Over-reporting packages is not a problem.
+ if _, ok := seenPkgs[pkg.ID]; !ok {
+ // it is a new package, just add it
+ seenPkgs[pkg.ID] = pkg
+ response.Packages = append(response.Packages, pkg)
+ }
+ // if the package was not a root one, it cannot have the file
+ if !isRoot[pkg.ID] {
+ continue
+ }
+ for _, pkgFile := range pkg.GoFiles {
+ if filepath.Base(f) == filepath.Base(pkgFile) {
+ response.Roots = append(response.Roots, pkg.ID)
+ break
+ }
+ }
+ }
+ }
+ return response, nil
+}
+
+// Fields must match go list;
+// see $GOROOT/src/cmd/go/internal/load/pkg.go.
+type jsonPackage struct {
+ ImportPath string
+ Dir string
+ Name string
+ Export string
+ GoFiles []string
+ CompiledGoFiles []string
+ CFiles []string
+ CgoFiles []string
+ CXXFiles []string
+ MFiles []string
+ HFiles []string
+ FFiles []string
+ SFiles []string
+ SwigFiles []string
+ SwigCXXFiles []string
+ SysoFiles []string
+ Imports []string
+ ImportMap map[string]string
+ Deps []string
+ TestGoFiles []string
+ TestImports []string
+ XTestGoFiles []string
+ XTestImports []string
+ ForTest string // q in a "p [q.test]" package, else ""
+ DepOnly bool
+}
+
+func otherFiles(p *jsonPackage) [][]string {
+ return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
+}
+
+// golistDriverCurrent uses the "go list" command to expand the
+// pattern words and return metadata for the specified packages.
+// dir may be "" and env may be nil, as per os/exec.Command.
+func golistDriverCurrent(cfg *Config, words ...string) (*driverResponse, error) {
+ // go list uses the following identifiers in ImportPath and Imports:
+ //
+ // "p" -- importable package or main (command)
+ // "q.test" -- q's test executable
+ // "p [q.test]" -- variant of p as built for q's test executable
+ // "q_test [q.test]" -- q's external test package
+ //
+ // The packages p that are built differently for a test q.test
+ // are q itself, plus any helpers used by the external test q_test,
+ // typically including "testing" and all its dependencies.
+
+ // Run "go list" for complete
+ // information on the specified packages.
+ buf, err := golist(cfg, golistargs(cfg, words))
+ if err != nil {
+ return nil, err
+ }
+ // Decode the JSON and convert it to Package form.
+ var response driverResponse
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+
+ // Bad package?
+ if p.Name == "" {
+ // This could be due to:
+ // - no such package
+ // - package directory contains no Go source files
+ // - all package declarations are mangled
+ // - and possibly other things.
+ //
+ // For now, we throw it away and let later
+ // stages rediscover the problem, but this
+ // discards the error message computed by go list
+ // and computes a new one---by different logic:
+ // if only one of the package declarations is
+ // bad, for example, should we report an error
+ // in Metadata mode?
+ // Unless we parse and typecheck, we might not
+ // notice there's a problem.
+ //
+ // Perhaps we should save a map of PackageID to
+ // errors for such cases.
+ continue
+ }
+
+ id := p.ImportPath
+
+ // Extract the PkgPath from the package's ID.
+ pkgpath := id
+ if i := strings.IndexByte(id, ' '); i >= 0 {
+ pkgpath = id[:i]
+ }
+
+ if pkgpath == "unsafe" {
+ p.GoFiles = nil // ignore fake unsafe.go file
+ }
+
+ // Assume go list emits only absolute paths for Dir.
+ if !filepath.IsAbs(p.Dir) {
+ log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir)
+ }
+
+ export := p.Export
+ if export != "" && !filepath.IsAbs(export) {
+ export = filepath.Join(p.Dir, export)
+ }
+
+ // imports
+ //
+ // Imports contains the IDs of all imported packages.
+ // ImportMap records (path, ID) only where they differ.
+ ids := make(map[string]bool)
+ for _, id := range p.Imports {
+ ids[id] = true
+ }
+ imports := make(map[string]*Package)
+ for path, id := range p.ImportMap {
+ imports[path] = &Package{ID: id} // non-identity import
+ delete(ids, id)
+ }
+ for id := range ids {
+ if id == "C" {
+ continue
+ }
+
+ imports[id] = &Package{ID: id} // identity import
+ }
+ if !p.DepOnly {
+ response.Roots = append(response.Roots, id)
+ }
+ pkg := &Package{
+ ID: id,
+ Name: p.Name,
+ PkgPath: pkgpath,
+ GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
+ CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
+ OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ Imports: imports,
+ ExportFile: export,
+ }
+ // TODO(matloob): Temporary hack since CompiledGoFiles isn't always set.
+ if len(pkg.CompiledGoFiles) == 0 {
+ pkg.CompiledGoFiles = pkg.GoFiles
+ }
+ response.Packages = append(response.Packages, pkg)
+ }
+
+ return &response, nil
+}
+
+// absJoin absolutizes and flattens the lists of files.
+func absJoin(dir string, fileses ...[]string) (res []string) {
+ for _, files := range fileses {
+ for _, file := range files {
+ if !filepath.IsAbs(file) {
+ file = filepath.Join(dir, file)
+ }
+ res = append(res, file)
+ }
+ }
+ return res
+}
+
+func golistargs(cfg *Config, words []string) []string {
+ fullargs := []string{
+ "list", "-e", "-json", "-compiled",
+ fmt.Sprintf("-test=%t", cfg.Tests),
+ fmt.Sprintf("-export=%t", usesExportData(cfg)),
+ fmt.Sprintf("-deps=%t", cfg.Mode >= LoadImports),
+ }
+ fullargs = append(fullargs, cfg.Flags...)
+ fullargs = append(fullargs, "--")
+ fullargs = append(fullargs, words...)
+ return fullargs
+}
+
+// golist returns the JSON-encoded result of a "go list args..." query.
+func golist(cfg *Config, args []string) (*bytes.Buffer, error) {
+ out := new(bytes.Buffer)
+ cmd := exec.CommandContext(cfg.Context, "go", args...)
+ cmd.Env = cfg.Env
+ cmd.Dir = cfg.Dir
+ cmd.Stdout = out
+ cmd.Stderr = new(bytes.Buffer)
+ if err := cmd.Run(); err != nil {
+ exitErr, ok := err.(*exec.ExitError)
+ if !ok {
+ // Catastrophic error:
+ // - executable not found
+ // - context cancellation
+ return nil, fmt.Errorf("couldn't exec 'go list': %s %T", err, err)
+ }
+
+ // Old go list?
+ if strings.Contains(fmt.Sprint(cmd.Stderr), "flag provided but not defined") {
+ return nil, goTooOldError{fmt.Errorf("unsupported version of go list: %s: %s", exitErr, cmd.Stderr)}
+ }
+
+ // Export mode entails a build.
+ // If that build fails, errors appear on stderr
+ // (despite the -e flag) and the Export field is blank.
+ // Do not fail in that case.
+ if !usesExportData(cfg) {
+ return nil, fmt.Errorf("go list: %s: %s", exitErr, cmd.Stderr)
+ }
+ }
+
+ // Print standard error output from "go list".
+ // Due to the -e flag, this should be empty.
+ // However, in -export mode it contains build errors.
+ // Should go list save build errors in the Package.Error JSON field?
+ // See https://github.com/golang/go/issues/26319.
+ // If so, then we should continue to print stderr as go list
+ // will be silent unless something unexpected happened.
+ // If not, perhaps we should suppress it to reduce noise.
+ if stderr := fmt.Sprint(cmd.Stderr); stderr != "" {
+ fmt.Fprintf(os.Stderr, "go list stderr <<%s>>\n", stderr)
+ }
+
+ // debugging
+ if false {
+ fmt.Fprintln(os.Stderr, out)
+ }
+
+ return out, nil
+}
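A rough sketch of the "contains:" pattern form that goListDriver above peels off and resolves by running go list in the file's directory. The file path is hypothetical, and the go list invocation noted in the comment is an approximation of what golistargs builds for this configuration.

    package main

    import (
        "fmt"
        "log"

        "golang.org/x/tools/go/packages"
    )

    func main() {
        // For this configuration, golistargs above builds roughly:
        //   go list -e -json -compiled -test=true -deps=true -- <patterns>
        // plus an -export flag derived from the mode.
        cfg := &packages.Config{Mode: packages.LoadAllSyntax, Tests: true}

        // The "contains:" prefix asks for the package containing this file;
        // goListDriver runs go list with pattern "." in the file's directory.
        pkgs, err := packages.Load(cfg, "contains:internal/server/server.go")
        if err != nil {
            log.Fatal(err)
        }
        for _, p := range pkgs {
            fmt.Println(p.ID, p.PkgPath)
        }
    }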
diff --git a/vendor/golang.org/x/tools/go/packages/golist_fallback.go b/vendor/golang.org/x/tools/go/packages/golist_fallback.go
new file mode 100644
index 00000000..331bb655
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/golist_fallback.go
@@ -0,0 +1,282 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+import (
+ "encoding/json"
+ "fmt"
+
+ "go/build"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "golang.org/x/tools/go/internal/cgo"
+)
+
+// TODO(matloob): Delete this file once Go 1.12 is released.
+
+// This file provides backwards-compatibility support for
+// loading packages with versions of Go earlier than 1.10.4. This support is meant to
+// assist with migration to the Package API until there's
+// widespread adoption of these newer Go versions.
+// This support will be removed once Go 1.12 is released
+// in Q1 2019.
+
+func golistDriverFallback(cfg *Config, words ...string) (*driverResponse, error) {
+ original, deps, err := getDeps(cfg, words...)
+ if err != nil {
+ return nil, err
+ }
+
+ var tmpdir string // used for generated cgo files
+
+ var response driverResponse
+ addPackage := func(p *jsonPackage) {
+ if p.Name == "" {
+ return
+ }
+
+ id := p.ImportPath
+ isRoot := original[id] != nil
+ pkgpath := id
+
+ if pkgpath == "unsafe" {
+ p.GoFiles = nil // ignore fake unsafe.go file
+ }
+
+ importMap := func(importlist []string) map[string]*Package {
+ importMap := make(map[string]*Package)
+ for _, id := range importlist {
+
+ if id == "C" {
+ for _, path := range []string{"unsafe", "syscall", "runtime/cgo"} {
+ if pkgpath != path && importMap[path] == nil {
+ importMap[path] = &Package{ID: path}
+ }
+ }
+ continue
+ }
+ importMap[vendorlessPath(id)] = &Package{ID: id}
+ }
+ return importMap
+ }
+ compiledGoFiles := absJoin(p.Dir, p.GoFiles)
+ // Use a function to simplify control flow. It's just a bunch of gotos.
+ var cgoErrors []error
+ processCgo := func() bool {
+ // Suppress any cgo errors. Any relevant errors will show up in typechecking.
+ // TODO(matloob): Skip running cgo if Mode < LoadTypes.
+ if tmpdir == "" {
+ if tmpdir, err = ioutil.TempDir("", "gopackages"); err != nil {
+ cgoErrors = append(cgoErrors, err)
+ return false
+ }
+ }
+ outdir := filepath.Join(tmpdir, strings.Replace(p.ImportPath, "/", "_", -1))
+ if err := os.Mkdir(outdir, 0755); err != nil {
+ cgoErrors = append(cgoErrors, err)
+ return false
+ }
+ files, _, err := runCgo(p.Dir, outdir, cfg.Env)
+ if err != nil {
+ cgoErrors = append(cgoErrors, err)
+ return false
+ }
+ compiledGoFiles = append(compiledGoFiles, files...)
+ return true
+ }
+ if len(p.CgoFiles) == 0 || !processCgo() {
+ compiledGoFiles = append(compiledGoFiles, absJoin(p.Dir, p.CgoFiles)...) // Punt to typechecker.
+ }
+ if isRoot {
+ response.Roots = append(response.Roots, id)
+ }
+ response.Packages = append(response.Packages, &Package{
+ ID: id,
+ Name: p.Name,
+ GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
+ CompiledGoFiles: compiledGoFiles,
+ OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ PkgPath: pkgpath,
+ Imports: importMap(p.Imports),
+ // TODO(matloob): set errors on the Package to cgoErrors
+ })
+ if cfg.Tests {
+ testID := fmt.Sprintf("%s [%s.test]", id, id)
+ if len(p.TestGoFiles) > 0 || len(p.XTestGoFiles) > 0 {
+ if isRoot {
+ response.Roots = append(response.Roots, testID)
+ }
+ response.Packages = append(response.Packages, &Package{
+ ID: testID,
+ Name: p.Name,
+ GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles, p.TestGoFiles),
+ CompiledGoFiles: append(compiledGoFiles, absJoin(p.Dir, p.TestGoFiles)...),
+ OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ PkgPath: pkgpath,
+ Imports: importMap(append(p.Imports, p.TestImports...)),
+ // TODO(matloob): set errors on the Package to cgoErrors
+ })
+ if len(p.XTestGoFiles) > 0 {
+ xtestID := fmt.Sprintf("%s_test [%s.test]", id, id)
+ if isRoot {
+ response.Roots = append(response.Roots, xtestID)
+ }
+ for i, imp := range p.XTestImports {
+ if imp == p.ImportPath {
+ p.XTestImports[i] = testID
+ break
+ }
+ }
+ response.Packages = append(response.Packages, &Package{
+ ID: xtestID,
+ Name: p.Name + "_test",
+ GoFiles: absJoin(p.Dir, p.XTestGoFiles),
+ CompiledGoFiles: absJoin(p.Dir, p.XTestGoFiles),
+ PkgPath: pkgpath,
+ Imports: importMap(p.XTestImports),
+ })
+ }
+ }
+ }
+ }
+
+ for _, pkg := range original {
+ addPackage(pkg)
+ }
+ if cfg.Mode < LoadImports || len(deps) == 0 {
+ return &response, nil
+ }
+
+ buf, err := golist(cfg, golistArgsFallback(cfg, deps))
+ if err != nil {
+ return nil, err
+ }
+
+ // Decode the JSON and convert it to Package form.
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+
+ addPackage(p)
+ }
+
+ return &response, nil
+}
+
+// vendorlessPath returns the devendorized version of the import path ipath.
+// For example, vendorlessPath("foo/bar/vendor/a/b") returns "a/b".
+// Copied from golang.org/x/tools/imports/fix.go.
+func vendorlessPath(ipath string) string {
+ // Devendorize for use in import statement.
+ if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
+ return ipath[i+len("/vendor/"):]
+ }
+ if strings.HasPrefix(ipath, "vendor/") {
+ return ipath[len("vendor/"):]
+ }
+ return ipath
+}
+
+// getDeps runs an initial go list to determine all the dependency packages.
+func getDeps(cfg *Config, words ...string) (originalSet map[string]*jsonPackage, deps []string, err error) {
+ buf, err := golist(cfg, golistArgsFallback(cfg, words))
+ if err != nil {
+ return nil, nil, err
+ }
+
+ depsSet := make(map[string]bool)
+ originalSet = make(map[string]*jsonPackage)
+ var testImports []string
+
+ // Extract deps from the JSON.
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+
+ originalSet[p.ImportPath] = p
+ for _, dep := range p.Deps {
+ depsSet[dep] = true
+ }
+ if cfg.Tests {
+ // collect the additional imports of the test packages.
+ pkgTestImports := append(p.TestImports, p.XTestImports...)
+ for _, imp := range pkgTestImports {
+ if depsSet[imp] {
+ continue
+ }
+ depsSet[imp] = true
+ testImports = append(testImports, imp)
+ }
+ }
+ }
+ // Get the deps of the packages imported by tests.
+ if len(testImports) > 0 {
+ buf, err = golist(cfg, golistArgsFallback(cfg, testImports))
+ if err != nil {
+ return nil, nil, err
+ }
+ // Extract deps from the JSON.
+ for dec := json.NewDecoder(buf); dec.More(); {
+ p := new(jsonPackage)
+ if err := dec.Decode(p); err != nil {
+ return nil, nil, fmt.Errorf("JSON decoding failed: %v", err)
+ }
+ for _, dep := range p.Deps {
+ depsSet[dep] = true
+ }
+ }
+ }
+
+ for orig := range originalSet {
+ delete(depsSet, orig)
+ }
+
+ deps = make([]string, 0, len(depsSet))
+ for dep := range depsSet {
+ deps = append(deps, dep)
+ }
+ sort.Strings(deps) // ensure output is deterministic
+ return originalSet, deps, nil
+}
+
+func golistArgsFallback(cfg *Config, words []string) []string {
+ fullargs := []string{"list", "-e", "-json"}
+ fullargs = append(fullargs, cfg.Flags...)
+ fullargs = append(fullargs, "--")
+ fullargs = append(fullargs, words...)
+ return fullargs
+}
+
+func runCgo(pkgdir, tmpdir string, env []string) (files, displayfiles []string, err error) {
+ // Use go/build to open cgo files and determine the cgo flags, etc, from them.
+ // This is tricky so it's best to avoid reimplementing as much as we can, and
+ // we plan to delete this support once Go 1.12 is released anyways.
+ // TODO(matloob): This isn't completely correct because we're using the Default
+ // context. Perhaps we should more accurately fill in the context.
+ bp, err := build.ImportDir(pkgdir, build.ImportMode(0))
+ if err != nil {
+ return nil, nil, err
+ }
+ for _, ev := range env {
+ if v := strings.TrimPrefix(ev, "CGO_CPPFLAGS="); v != ev {
+ bp.CgoCPPFLAGS = append(bp.CgoCPPFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_CFLAGS="); v != ev {
+ bp.CgoCFLAGS = append(bp.CgoCFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_CXXFLAGS="); v != ev {
+ bp.CgoCXXFLAGS = append(bp.CgoCXXFLAGS, strings.Fields(v)...)
+ } else if v := strings.TrimPrefix(ev, "CGO_LDFLAGS="); v != ev {
+ bp.CgoLDFLAGS = append(bp.CgoLDFLAGS, strings.Fields(v)...)
+ }
+ }
+ return cgo.Run(bp, pkgdir, tmpdir, true)
+}
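To make the devendorizing rule concrete, here is a self-contained restatement of vendorlessPath above with a few sample inputs; it is illustrative only and duplicates the logic rather than calling the unexported function.

    package main

    import (
        "fmt"
        "strings"
    )

    // Same rule as vendorlessPath: strip everything up to and including the
    // last "/vendor/" segment, or a leading "vendor/" prefix.
    func vendorlessPath(ipath string) string {
        if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
            return ipath[i+len("/vendor/"):]
        }
        if strings.HasPrefix(ipath, "vendor/") {
            return ipath[len("vendor/"):]
        }
        return ipath
    }

    func main() {
        fmt.Println(vendorlessPath("foo/bar/vendor/a/b")) // a/b
        fmt.Println(vendorlessPath("vendor/a/b"))         // a/b
        fmt.Println(vendorlessPath("a/b"))                // a/b
    }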
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
new file mode 100644
index 00000000..7e8e4e2e
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -0,0 +1,824 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+// See doc.go for package documentation and implementation notes.
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "log"
+ "os"
+ "sync"
+
+ "golang.org/x/tools/go/gcexportdata"
+)
+
+// A LoadMode specifies the amount of detail to return when loading packages.
+// The modes are all strictly additive: each mode returns all the information
+// of the previous mode plus more, but may also increase the cost of
+// collecting the information.
+// Load is always allowed to return more information than requested.
+type LoadMode int
+
+const (
+ // LoadFiles finds the packages and computes their source file lists.
+ // Package fields: ID, Name, Errors, GoFiles, OtherFiles.
+ LoadFiles LoadMode = iota
+
+ // LoadImports adds import information for each package
+ // and its dependencies.
+ // Package fields added: Imports.
+ LoadImports
+
+ // LoadTypes adds type information for the package's exported symbols.
+ // Package fields added: Types, Fset, IllTyped.
+ // This will use the type information provided by the build system if
+ // possible, and the ExportFile field may be filled in.
+ LoadTypes
+
+ // LoadSyntax adds typed syntax trees for the packages matching the patterns.
+ // Package fields added: Syntax, TypesInfo, for direct pattern matches only.
+ LoadSyntax
+
+ // LoadAllSyntax adds typed syntax trees for the packages matching the patterns
+ // and all dependencies.
+ // Package fields added: Syntax, TypesInfo, for all packages in import graph.
+ LoadAllSyntax
+)
+
+// A Config specifies details about how packages should be loaded.
+// Calls to Load do not modify this struct.
+type Config struct {
+ // Mode controls the level of information returned for each package.
+ Mode LoadMode
+
+ // Context specifies the context for the load operation.
+ // If the context is cancelled, the loader may stop early
+ // and return an ErrCancelled error.
+ // If Context is nil, the load cannot be cancelled.
+ Context context.Context
+
+ // Dir is the directory in which to run the build system tool
+ // that provides information about the packages.
+ // If Dir is empty, the tool is run in the current directory.
+ Dir string
+
+ // Env is the environment to use when invoking the build system tool.
+ // If Env is nil, the current environment is used.
+ // Like in os/exec's Cmd, only the last value in the slice for
+ // each environment key is used. To specify the setting of only
+ // a few variables, append to the current environment, as in:
+ //
+ // opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386")
+ //
+ Env []string
+
+ // Flags is a list of command-line flags to be passed through to
+ // the underlying query tool.
+ Flags []string
+
+ // Error is called for each error encountered during package loading.
+ // It must be safe to call Error simultaneously from multiple goroutines.
+ // In addition to calling Error, the loader will record each error
+ // in the corresponding Package's Errors list.
+ // If Error is nil, the loader will print errors to os.Stderr.
+ // To disable printing of errors, set opt.Error = func(error){}.
+ // TODO(rsc): What happens in the Metadata loader? Currently nothing.
+ Error func(error)
+
+ // Fset is the token.FileSet to use when parsing source files or
+ // type information provided by the build system.
+ // If Fset is nil, the loader will create one.
+ Fset *token.FileSet
+
+ // ParseFile is called to read and parse each file
+ // when preparing a package's type-checked syntax tree.
+ // It must be safe to call ParseFile simultaneously from multiple goroutines.
+ // If ParseFile is nil, the loader will use parser.ParseFile.
+ //
+ // Setting ParseFile to a custom implementation can allow
+ // providing alternate file content in order to type-check
+ // unsaved text editor buffers, or to selectively eliminate
+ // unwanted function bodies to reduce the amount of work
+ // done by the type checker.
+ ParseFile func(fset *token.FileSet, filename string) (*ast.File, error)
+
+ // If Tests is set, the loader includes not just the packages
+ // matching a particular pattern but also any related test packages,
+ // including test-only variants of the package and the test executable.
+ //
+ // For example, when using the go command, loading "fmt" with Tests=true
+ // returns four packages, with IDs "fmt" (the standard package),
+ // "fmt [fmt.test]" (the package as compiled for the test),
+ // "fmt_test" (the test functions from source files in package fmt_test),
+ // and "fmt.test" (the test binary).
+ //
+ // In build systems with explicit names for tests,
+ // setting Tests may have no effect.
+ Tests bool
+
+ // TypeChecker provides additional configuration for type-checking syntax trees.
+ //
+ // It is used for all packages in LoadAllSyntax mode,
+ // and for the packages matching the patterns, but not their dependencies,
+ // in LoadSyntax mode.
+ //
+ // The TypeChecker.Error function is ignored:
+ // errors are reported using the Error function defined above.
+ //
+ // The TypeChecker.Importer function is ignored:
+ // the loader defines an appropriate importer.
+ //
+ // TODO(rsc): TypeChecker.Sizes should use the same sizes as the main build.
+ // Derive them from the runtime?
+ TypeChecker types.Config
+}
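A rough sketch of the ParseFile hook described above (and in doc.go's overlay discussion), serving modified-but-unsaved buffer contents to the parser. The overlay path and contents are made up for illustration, and, as doc.go notes, an overlay cannot change the metadata reported by the underlying build tool.

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "log"

        "golang.org/x/tools/go/packages"
    )

    func main() {
        // Hypothetical in-memory overlay of an unsaved editor buffer.
        overlay := map[string][]byte{
            "/home/me/project/foo.go": []byte("package foo\n\nfunc Hello() string { return \"hi\" }\n"),
        }
        cfg := &packages.Config{
            Mode: packages.LoadSyntax,
            // Serve overlay contents when present; otherwise read from disk.
            ParseFile: func(fset *token.FileSet, filename string) (*ast.File, error) {
                if src, ok := overlay[filename]; ok {
                    return parser.ParseFile(fset, filename, src, parser.ParseComments)
                }
                return parser.ParseFile(fset, filename, nil, parser.ParseComments)
            },
        }
        pkgs, err := packages.Load(cfg, "./...")
        if err != nil {
            log.Fatal(err)
        }
        for _, p := range pkgs {
            fmt.Println(p.ID, len(p.Syntax), "files parsed")
        }
    }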
+
+// driver is the type for functions that query the build system for the
+// packages named by the patterns.
+type driver func(cfg *Config, patterns ...string) (*driverResponse, error)
+
+// driverResponse contains the results for a driver query.
+type driverResponse struct {
+ // Roots is the set of package IDs that make up the root packages.
+ // We have to encode this separately because when we encode a single package
+ // we cannot know if it is one of the roots as that requires knowledge of the
+ // graph it is part of.
+ Roots []string `json:",omitempty"`
+
+ // Packages is the full set of packages in the graph.
+ // The packages are not connected into a graph.
+ // The Imports, if populated, will be stubs that only have their ID set.
+ // Imports will be connected and then type and syntax information added in a
+ // later pass (see refine).
+ Packages []*Package
+}
+
+// Load loads and returns the Go packages named by the given patterns.
+//
+// Config specifies loading options;
+// nil behaves the same as an empty Config.
+//
+// Load returns an error if any of the patterns was invalid
+// as defined by the underlying build system.
+// It may return an empty list of packages without an error,
+// for instance for an empty expansion of a valid wildcard.
+func Load(cfg *Config, patterns ...string) ([]*Package, error) {
+ l := newLoader(cfg)
+ response, err := defaultDriver(&l.Config, patterns...)
+ if err != nil {
+ return nil, err
+ }
+ return l.refine(response.Roots, response.Packages...)
+}
+
+// defaultDriver is a driver that looks for an external driver binary, and if
+// it does not find one, falls back to the built-in go list driver.
+func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
+ driver := findExternalDriver(cfg)
+ if driver == nil {
+ driver = goListDriver
+ }
+ return driver(cfg, patterns...)
+}
+
+// A Package describes a single loaded Go package.
+type Package struct {
+ // ID is a unique identifier for a package,
+ // in a syntax provided by the underlying build system.
+ //
+ // Because the syntax varies based on the build system,
+ // clients should treat IDs as opaque and not attempt to
+ // interpret them.
+ ID string
+
+ // Name is the package name as it appears in the package source code.
+ Name string
+
+ // This is the package path as used by the types package.
+ // This is used to map entries in the type information back to the package
+ // they come from.
+ PkgPath string
+
+ // Errors lists any errors encountered while loading the package.
+ // TODO(rsc): Say something about the errors or at least their Strings,
+ // as far as file:line being at the beginning and so on.
+ Errors []error
+
+ // GoFiles lists the absolute file paths of the package's Go source files.
+ GoFiles []string
+
+ // CompiledGoFiles lists the absolute file paths of the package's source
+ // files that were presented to the compiler.
+ // This may differ from GoFiles if files are processed before compilation.
+ CompiledGoFiles []string
+
+ // OtherFiles lists the absolute file paths of the package's non-Go source files,
+ // including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
+ OtherFiles []string
+
+ // ExportFile is the absolute path to a file containing the type information
+ // provided by the build system.
+ ExportFile string
+
+ // Imports maps import paths appearing in the package's Go source files
+ // to corresponding loaded Packages.
+ Imports map[string]*Package
+
+ // Types is the type information for the package.
+ // Modes LoadTypes and above set this field for all packages.
+ //
+ // TODO(adonovan): all packages? In Types mode this entails
+ // asymptotically more export data processing than is required
+ // to load the requested packages. Is that what we want?
+ Types *types.Package
+
+ // Fset provides position information for Types, TypesInfo, and Syntax.
+ // Modes LoadTypes and above set this field for all packages.
+ Fset *token.FileSet
+
+ // IllTyped indicates whether the package has any type errors.
+ // Modes LoadTypes and above set this field for all packages.
+ IllTyped bool
+
+ // Syntax is the package's syntax trees, for the files listed in GoFiles.
+ //
+ // Mode LoadSyntax sets this field for packages matching the patterns.
+ // Mode LoadAllSyntax sets this field for all packages, including dependencies.
+ Syntax []*ast.File
+
+ // TypesInfo is the type-checking results for the package's syntax trees.
+ // It is set only when Syntax is set.
+ TypesInfo *types.Info
+}
+
+// packageError is used to serialize structured errors as much as possible.
+// This has members compatible with the golist error type, and possibly some
+// more if we need other error information to survive.
+type packageError struct {
+ Pos string // position of error
+ Err string // the error itself
+}
+
+func (e *packageError) Error() string {
+ return e.Pos + ": " + e.Err
+}
+
+// flatPackage is the JSON form of Package.
+// It drops all the type and syntax fields, and transforms the Imports and Errors.
+type flatPackage struct {
+ ID string
+ Name string `json:",omitempty"`
+ PkgPath string `json:",omitempty"`
+ Errors []*packageError `json:",omitempty"`
+ GoFiles []string `json:",omitempty"`
+ CompiledGoFiles []string `json:",omitempty"`
+ OtherFiles []string `json:",omitempty"`
+ ExportFile string `json:",omitempty"`
+ Imports map[string]string `json:",omitempty"`
+}
+
+// MarshalJSON returns the Package in its JSON form.
+// For the most part, the structure fields are written out unmodified, and
+// the type and syntax fields are skipped.
+// The imports are written out as just a map of path to package id.
+// The errors are written using a custom type that tries to preserve the
+// structure of error types we know about.
+// This method exists to enable support for additional build systems. It is
+// not intended for use by clients of the API and we may change the format.
+func (p *Package) MarshalJSON() ([]byte, error) {
+ flat := &flatPackage{
+ ID: p.ID,
+ Name: p.Name,
+ PkgPath: p.PkgPath,
+ GoFiles: p.GoFiles,
+ CompiledGoFiles: p.CompiledGoFiles,
+ OtherFiles: p.OtherFiles,
+ ExportFile: p.ExportFile,
+ }
+ if len(p.Errors) > 0 {
+ flat.Errors = make([]*packageError, len(p.Errors))
+ for i, err := range p.Errors {
+ //TODO: best effort mapping of errors to the serialized form
+ switch err := err.(type) {
+ case *packageError:
+ flat.Errors[i] = err
+ default:
+ flat.Errors[i] = &packageError{Err: err.Error()}
+ }
+ }
+ }
+ if len(p.Imports) > 0 {
+ flat.Imports = make(map[string]string, len(p.Imports))
+ for path, ipkg := range p.Imports {
+ flat.Imports[path] = ipkg.ID
+ }
+ }
+ return json.Marshal(flat)
+}
+
+// UnmarshalJSON reads in a Package from its JSON format.
+// See MarshalJSON for details about the format accepted.
+func (p *Package) UnmarshalJSON(b []byte) error {
+ flat := &flatPackage{}
+ if err := json.Unmarshal(b, &flat); err != nil {
+ return err
+ }
+ *p = Package{
+ ID: flat.ID,
+ Name: flat.Name,
+ PkgPath: flat.PkgPath,
+ GoFiles: flat.GoFiles,
+ CompiledGoFiles: flat.CompiledGoFiles,
+ OtherFiles: flat.OtherFiles,
+ ExportFile: flat.ExportFile,
+ }
+ if len(flat.Errors) > 0 {
+ p.Errors = make([]error, len(flat.Errors))
+ for i, err := range flat.Errors {
+ p.Errors[i] = err
+ }
+ }
+ if len(flat.Imports) > 0 {
+ p.Imports = make(map[string]*Package, len(flat.Imports))
+ for path, id := range flat.Imports {
+ p.Imports[path] = &Package{ID: id}
+ }
+ }
+ return nil
+}
+
+func (p *Package) String() string { return p.ID }
+
+// loaderPackage augments Package with state used during the loading phase
+type loaderPackage struct {
+ *Package
+ importErrors map[string]error // maps each bad import to its error
+ loadOnce sync.Once
+ color uint8 // for cycle detection
+ mark, needsrc bool // used when Mode >= LoadTypes
+}
+
+// loader holds the working state of a single call to load.
+type loader struct {
+ pkgs map[string]*loaderPackage
+ Config
+ exportMu sync.Mutex // enforces mutual exclusion of exportdata operations
+}
+
+func newLoader(cfg *Config) *loader {
+ ld := &loader{}
+ if cfg != nil {
+ ld.Config = *cfg
+ }
+ if ld.Context == nil {
+ ld.Context = context.Background()
+ }
+ if ld.Dir == "" {
+ if dir, err := os.Getwd(); err == nil {
+ ld.Dir = dir
+ }
+ }
+
+ if ld.Mode >= LoadTypes {
+ if ld.Fset == nil {
+ ld.Fset = token.NewFileSet()
+ }
+
+ // Error and ParseFile are required even in LoadTypes mode
+ // because we load source if export data is missing.
+
+ if ld.Error == nil {
+ ld.Error = func(e error) {
+ fmt.Fprintln(os.Stderr, e)
+ }
+ }
+
+ if ld.ParseFile == nil {
+ ld.ParseFile = func(fset *token.FileSet, filename string) (*ast.File, error) {
+ const mode = parser.AllErrors | parser.ParseComments
+ return parser.ParseFile(fset, filename, nil, mode)
+ }
+ }
+ }
+ return ld
+}
+
+// refine connects the supplied packages into a graph and then adds type
+// and syntax information as requested by the LoadMode.
+func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
+ if len(list) == 0 {
+ return nil, fmt.Errorf("packages not found")
+ }
+ isRoot := make(map[string]bool, len(roots))
+ for _, root := range roots {
+ isRoot[root] = true
+ }
+ ld.pkgs = make(map[string]*loaderPackage)
+ // First pass: fix up each package, build the package map, and collect the roots.
+ var initial []*loaderPackage
+ for _, pkg := range list {
+ lpkg := &loaderPackage{
+ Package: pkg,
+ needsrc: ld.Mode >= LoadAllSyntax ||
+ (ld.Mode >= LoadSyntax && isRoot[pkg.ID]) ||
+ (pkg.ExportFile == "" && pkg.PkgPath != "unsafe"),
+ }
+ ld.pkgs[lpkg.ID] = lpkg
+ if isRoot[lpkg.ID] {
+ initial = append(initial, lpkg)
+ }
+ }
+
+ // Materialize the import graph.
+
+ const (
+ white = 0 // new
+ grey = 1 // in progress
+ black = 2 // complete
+ )
+
+ // visit traverses the import graph, depth-first,
+ // and materializes the graph as Package.Imports.
+ //
+ // Valid imports are saved in the Package.Imports map.
+ // Invalid imports (cycles and missing nodes) are saved in the importErrors map.
+ // Thus, even in the presence of both kinds of errors, the import graph remains a DAG.
+ //
+ // visit returns whether the package needs src or has a transitive
+ // dependency on a package that does. These are the only packages
+ // for which we load source code.
+ var stack []*loaderPackage
+ var visit func(lpkg *loaderPackage) bool
+ visit = func(lpkg *loaderPackage) bool {
+ switch lpkg.color {
+ case black:
+ return lpkg.needsrc
+ case grey:
+ panic("internal error: grey node")
+ }
+ lpkg.color = grey
+ stack = append(stack, lpkg) // push
+ stubs := lpkg.Imports // the deserialized form has only stub Packages (ID only) in Imports
+ lpkg.Imports = make(map[string]*Package, len(stubs))
+ for importPath, ipkg := range stubs {
+ var importErr error
+ imp := ld.pkgs[ipkg.ID]
+ if imp == nil {
+ // (includes package "C" when DisableCgo)
+ importErr = fmt.Errorf("missing package: %q", ipkg.ID)
+ } else if imp.color == grey {
+ importErr = fmt.Errorf("import cycle: %s", stack)
+ }
+ if importErr != nil {
+ if lpkg.importErrors == nil {
+ lpkg.importErrors = make(map[string]error)
+ }
+ lpkg.importErrors[importPath] = importErr
+ continue
+ }
+
+ if visit(imp) {
+ lpkg.needsrc = true
+ }
+ lpkg.Imports[importPath] = imp.Package
+ }
+
+ stack = stack[:len(stack)-1] // pop
+ lpkg.color = black
+
+ return lpkg.needsrc
+ }
+
+ if ld.Mode < LoadImports {
+ // Below LoadImports, drop the stub import packages that we are not even going to try to resolve.
+ for _, lpkg := range initial {
+ lpkg.Imports = nil
+ }
+ } else {
+ // For each initial package, create its import DAG.
+ for _, lpkg := range initial {
+ visit(lpkg)
+ }
+ }
+ // Load type data if needed, starting at
+ // the initial packages (roots of the import DAG).
+ if ld.Mode >= LoadTypes {
+ var wg sync.WaitGroup
+ for _, lpkg := range initial {
+ wg.Add(1)
+ go func(lpkg *loaderPackage) {
+ ld.loadRecursive(lpkg)
+ wg.Done()
+ }(lpkg)
+ }
+ wg.Wait()
+ }
+
+ result := make([]*Package, len(initial))
+ for i, lpkg := range initial {
+ result[i] = lpkg.Package
+ }
+ return result, nil
+}
+
+// loadRecursive loads the specified package and its dependencies,
+// recursively, in parallel, in topological order.
+// It is atomic and idempotent.
+// Precondition: ld.Mode >= LoadTypes.
+func (ld *loader) loadRecursive(lpkg *loaderPackage) {
+ lpkg.loadOnce.Do(func() {
+ // Load the direct dependencies, in parallel.
+ var wg sync.WaitGroup
+ for _, ipkg := range lpkg.Imports {
+ imp := ld.pkgs[ipkg.ID]
+ wg.Add(1)
+ go func(imp *loaderPackage) {
+ ld.loadRecursive(imp)
+ wg.Done()
+ }(imp)
+ }
+ wg.Wait()
+
+ ld.loadPackage(lpkg)
+ })
+}
+
+// loadPackage loads the specified package.
+// It must be called only once per Package,
+// after immediate dependencies are loaded.
+// Precondition: ld.Mode >= LoadTypes.
+func (ld *loader) loadPackage(lpkg *loaderPackage) {
+ if lpkg.PkgPath == "unsafe" {
+ // Fill in the blanks to avoid surprises.
+ lpkg.Types = types.Unsafe
+ lpkg.Fset = ld.Fset
+ lpkg.Syntax = []*ast.File{}
+ lpkg.TypesInfo = new(types.Info)
+ return
+ }
+
+ // Call NewPackage directly with explicit name.
+ // This avoids skew between golist and go/types when the files'
+ // package declarations are inconsistent.
+ lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name)
+
+ if !lpkg.needsrc {
+ ld.loadFromExportData(lpkg)
+ return // not a source package, don't get syntax trees
+ }
+
+ hardErrors := false
+ appendError := func(err error) {
+ if terr, ok := err.(types.Error); ok && terr.Soft {
+ // Don't mark the package as bad.
+ } else {
+ hardErrors = true
+ }
+ ld.Error(err)
+ lpkg.Errors = append(lpkg.Errors, err)
+ }
+
+ files, errs := ld.parseFiles(lpkg.CompiledGoFiles)
+ for _, err := range errs {
+ appendError(err)
+ }
+
+ lpkg.Fset = ld.Fset
+ lpkg.Syntax = files
+
+ lpkg.TypesInfo = &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+
+ // Copy the prototype types.Config as it must vary across Packages.
+ tc := ld.TypeChecker // copy
+ tc.Importer = importerFunc(func(path string) (*types.Package, error) {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+
+ // The imports map is keyed by import path.
+ ipkg := lpkg.Imports[path]
+ if ipkg == nil {
+ if err := lpkg.importErrors[path]; err != nil {
+ return nil, err
+ }
+ // There was skew between the metadata and the
+ // import declarations, likely due to an edit
+ // race, or because the ParseFile feature was
+ // used to supply alternative file contents.
+ return nil, fmt.Errorf("no metadata for %s", path)
+ }
+
+ if ipkg.Types != nil && ipkg.Types.Complete() {
+ return ipkg.Types, nil
+ }
+ log.Fatalf("internal error: nil Pkg importing %q from %q", path, lpkg)
+ panic("unreachable")
+ })
+ tc.Error = appendError
+
+ // type-check
+ types.NewChecker(&tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax)
+
+ lpkg.importErrors = nil // no longer needed
+
+ // If !Cgo, the type-checker uses FakeImportC mode, so
+ // it doesn't invoke the importer for import "C",
+ // nor report an error for the import,
+ // or for any undefined C.f reference.
+ // We must detect this explicitly and correctly
+ // mark the package as IllTyped (by reporting an error).
+ // TODO(adonovan): if these errors are annoying,
+ // we could just set IllTyped quietly.
+ if tc.FakeImportC {
+ outer:
+ for _, f := range lpkg.Syntax {
+ for _, imp := range f.Imports {
+ if imp.Path.Value == `"C"` {
+ appendError(fmt.Errorf(`%s: import "C" ignored`,
+ lpkg.Fset.Position(imp.Pos())))
+ break outer
+ }
+ }
+ }
+ }
+
+ // Propagate type errors from dependencies: the package is ill-typed if any direct dependency is.
+ for _, imp := range lpkg.Imports {
+ if imp.IllTyped {
+ hardErrors = true
+ break
+ }
+ }
+
+ lpkg.IllTyped = hardErrors
+}
+
+// An importerFunc is an implementation of the single-method
+// types.Importer interface based on a function value.
+type importerFunc func(path string) (*types.Package, error)
+
+func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
+
+// We use a counting semaphore to limit
+// the number of parallel I/O calls per process.
+var ioLimit = make(chan bool, 20)
+
+// parseFiles reads and parses the Go source files and returns the ASTs
+// of the ones that could be at least partially parsed, along with a
+// list of I/O and parse errors encountered.
+//
+// Because files are scanned in parallel, the token.Pos
+// positions of the resulting ast.Files are not ordered.
+//
+func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
+ var wg sync.WaitGroup
+ n := len(filenames)
+ parsed := make([]*ast.File, n)
+ errors := make([]error, n)
+ for i, file := range filenames {
+ wg.Add(1)
+ go func(i int, filename string) {
+ ioLimit <- true // wait
+ // ParseFile may return both an AST and an error.
+ parsed[i], errors[i] = ld.ParseFile(ld.Fset, filename)
+ <-ioLimit // signal
+ wg.Done()
+ }(i, file)
+ }
+ wg.Wait()
+
+ // Eliminate nils, preserving order.
+ var o int
+ for _, f := range parsed {
+ if f != nil {
+ parsed[o] = f
+ o++
+ }
+ }
+ parsed = parsed[:o]
+
+ o = 0
+ for _, err := range errors {
+ if err != nil {
+ errors[o] = err
+ o++
+ }
+ }
+ errors = errors[:o]
+
+ return parsed, errors
+}
+
+// loadFromExportData returns type information for the specified
+// package, loading it from an export data file on the first request.
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) {
+ if lpkg.PkgPath == "" {
+ log.Fatalf("internal error: Package %s has no PkgPath", lpkg)
+ }
+
+ // Because gcexportdata.Read has the potential to create or
+ // modify the types.Package for each node in the transitive
+ // closure of dependencies of lpkg, all exportdata operations
+ // must be sequential. (Finer-grained locking would require
+ // changes to the gcexportdata API.)
+ //
+ // The exportMu lock guards the Package.Types field and the
+ // types.Package it points to, for each Package in the graph.
+ //
+ // Not all accesses to Package.Types need to be protected by exportMu:
+ // graph ordering ensures that direct dependencies of source
+ // packages are fully loaded before the importer reads their Types field.
+ ld.exportMu.Lock()
+ defer ld.exportMu.Unlock()
+
+ if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() {
+ return tpkg, nil // cache hit
+ }
+
+ lpkg.IllTyped = true // fail safe
+
+ if lpkg.ExportFile == "" {
+ // Errors while building export data will have been printed to stderr.
+ return nil, fmt.Errorf("no export data file")
+ }
+ f, err := os.Open(lpkg.ExportFile)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+
+ // Read gc export data.
+ //
+ // We don't currently support gccgo export data because all
+ // underlying workspaces use the gc toolchain. (Even build
+ // systems that support gccgo don't use it for workspace
+ // queries.)
+ r, err := gcexportdata.NewReader(f)
+ if err != nil {
+ return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+
+ // Build the view.
+ //
+ // The gcexportdata machinery has no concept of package ID.
+ // It identifies packages by their PkgPath, which although not
+ // globally unique is unique within the scope of one invocation
+ // of the linker, type-checker, or gcexportdata.
+ //
+ // So, we must build a PkgPath-keyed view of the global
+ // (conceptually ID-keyed) cache of packages and pass it to
+ // gcexportdata. The view must contain every existing
+ // package that might possibly be mentioned by the
+ // current package---its transitive closure.
+ //
+ // TODO(adonovan): it would be simpler and more efficient
+ // if the export data machinery invoked a callback to
+ // get-or-create a package instead of a map.
+ //
+ view := make(map[string]*types.Package) // view seen by gcexportdata
+ seen := make(map[*loaderPackage]bool) // all visited packages
+ var visit func(pkgs map[string]*Package)
+ visit = func(pkgs map[string]*Package) {
+ for _, p := range pkgs {
+ lpkg := ld.pkgs[p.ID]
+ if !seen[lpkg] {
+ seen[lpkg] = true
+ view[lpkg.PkgPath] = lpkg.Types
+ visit(lpkg.Imports)
+ }
+ }
+ }
+ visit(lpkg.Imports)
+
+ // Parse the export data.
+ // (May create/modify packages in view.)
+ tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)
+ if err != nil {
+ return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+
+ lpkg.Types = tpkg
+ lpkg.IllTyped = false
+
+ return tpkg, nil
+}
+
+func usesExportData(cfg *Config) bool {
+ return LoadTypes <= cfg.Mode && cfg.Mode < LoadAllSyntax
+}
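
For orientation, the loader above is normally driven through the package's exported Load entry point rather than used directly. A minimal sketch, assuming only the Config fields and Mode constants defined in this file; the "./..." pattern and the printed fields are illustrative:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	// LoadSyntax requests type-checked syntax trees for the root packages;
	// dependencies are satisfied from export data where available.
	cfg := &packages.Config{Mode: packages.LoadSyntax}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err)
	}
	for _, p := range pkgs {
		fmt.Printf("%s: %d parsed files, ill-typed=%v\n", p.ID, len(p.Syntax), p.IllTyped)
	}
}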
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
new file mode 100644
index 00000000..9c441dba
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/imports.go
@@ -0,0 +1,31 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import "go/types"
+
+// Dependencies returns all dependencies of the specified packages.
+//
+// Dependent packages appear in topological order: if package P imports
+// package Q, Q appears earlier than P in the result.
+// The algorithm follows import statements in the order they
+// appear in the source code, so the result is a total order.
+//
+func Dependencies(pkgs ...*types.Package) []*types.Package {
+ var result []*types.Package
+ seen := make(map[*types.Package]bool)
+ var visit func(pkgs []*types.Package)
+ visit = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !seen[p] {
+ seen[p] = true
+ visit(p.Imports())
+ result = append(result, p)
+ }
+ }
+ }
+ visit(pkgs)
+ return result
+}
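
A short usage sketch for Dependencies, assuming a *types.Package obtained elsewhere (for example from a go/packages load); the printDeps name is illustrative:

package example

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

// printDeps prints every dependency of pkg, dependencies before dependents,
// as guaranteed by the topological order returned by Dependencies.
func printDeps(pkg *types.Package) {
	for _, dep := range typeutil.Dependencies(pkg) {
		fmt.Println(dep.Path())
	}
}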
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go
new file mode 100644
index 00000000..c7f75450
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go
@@ -0,0 +1,313 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typeutil defines various utilities for types, such as Map,
+// a mapping from types.Type to interface{} values.
+package typeutil // import "golang.org/x/tools/go/types/typeutil"
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "reflect"
+)
+
+// Map is a hash-table-based mapping from types (types.Type) to
+// arbitrary interface{} values. The concrete types that implement
+// the Type interface are pointers. Since they are not canonicalized,
+// == cannot be used to check for equivalence, and thus we cannot
+// simply use a Go map.
+//
+// Just as with map[K]V, a nil *Map is a valid empty map.
+//
+// Not thread-safe.
+//
+type Map struct {
+ hasher Hasher // shared by many Maps
+ table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
+ length int // number of map entries
+}
+
+// entry is an entry (key/value association) in a hash bucket.
+type entry struct {
+ key types.Type
+ value interface{}
+}
+
+// SetHasher sets the hasher used by Map.
+//
+// All Hashers are functionally equivalent but contain internal state
+// used to cache the results of hashing previously seen types.
+//
+// A single Hasher created by MakeHasher() may be shared among many
+// Maps. This is recommended if the instances have many keys in
+// common, as it will amortize the cost of hash computation.
+//
+// A Hasher may grow without bound as new types are seen. Even when a
+// type is deleted from the map, the Hasher never shrinks, since other
+// types in the map may reference the deleted type indirectly.
+//
+// Hashers are not thread-safe, and even read-only operations such as
+// Map.At require updates to the hasher, so a full Mutex lock (not a
+// read-lock) is required around all Map operations if a shared
+// hasher is accessed from multiple threads.
+//
+// If SetHasher is not called, the Map will create a private hasher at
+// the first call to Insert.
+//
+func (m *Map) SetHasher(hasher Hasher) {
+ m.hasher = hasher
+}
+
+// Delete removes the entry with the given key, if any.
+// It returns true if the entry was found.
+//
+func (m *Map) Delete(key types.Type) bool {
+ if m != nil && m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ for i, e := range bucket {
+ if e.key != nil && types.Identical(key, e.key) {
+ // We can't compact the bucket as it
+ // would disturb iterators.
+ bucket[i] = entry{}
+ m.length--
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// At returns the map entry for the given key.
+// The result is nil if the entry is not present.
+//
+func (m *Map) At(key types.Type) interface{} {
+ if m != nil && m.table != nil {
+ for _, e := range m.table[m.hasher.Hash(key)] {
+ if e.key != nil && types.Identical(key, e.key) {
+ return e.value
+ }
+ }
+ }
+ return nil
+}
+
+// Set sets the map entry for key to value,
+// and returns the previous entry, if any.
+func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) {
+ if m.table != nil {
+ hash := m.hasher.Hash(key)
+ bucket := m.table[hash]
+ var hole *entry
+ for i, e := range bucket {
+ if e.key == nil {
+ hole = &bucket[i]
+ } else if types.Identical(key, e.key) {
+ prev = e.value
+ bucket[i].value = value
+ return
+ }
+ }
+
+ if hole != nil {
+ *hole = entry{key, value} // overwrite deleted entry
+ } else {
+ m.table[hash] = append(bucket, entry{key, value})
+ }
+ } else {
+ if m.hasher.memo == nil {
+ m.hasher = MakeHasher()
+ }
+ hash := m.hasher.Hash(key)
+ m.table = map[uint32][]entry{hash: {entry{key, value}}}
+ }
+
+ m.length++
+ return
+}
+
+// Len returns the number of map entries.
+func (m *Map) Len() int {
+ if m != nil {
+ return m.length
+ }
+ return 0
+}
+
+// Iterate calls function f on each entry in the map in unspecified order.
+//
+// If f should mutate the map, Iterate provides the same guarantees as
+// Go maps: if f deletes a map entry that Iterate has not yet reached,
+// f will not be invoked for it, but if f inserts a map entry that
+// Iterate has not yet reached, whether or not f will be invoked for
+// it is unspecified.
+//
+func (m *Map) Iterate(f func(key types.Type, value interface{})) {
+ if m != nil {
+ for _, bucket := range m.table {
+ for _, e := range bucket {
+ if e.key != nil {
+ f(e.key, e.value)
+ }
+ }
+ }
+ }
+}
+
+// Keys returns a new slice containing the set of map keys.
+// The order is unspecified.
+func (m *Map) Keys() []types.Type {
+ keys := make([]types.Type, 0, m.Len())
+ m.Iterate(func(key types.Type, _ interface{}) {
+ keys = append(keys, key)
+ })
+ return keys
+}
+
+func (m *Map) toString(values bool) string {
+ if m == nil {
+ return "{}"
+ }
+ var buf bytes.Buffer
+ fmt.Fprint(&buf, "{")
+ sep := ""
+ m.Iterate(func(key types.Type, value interface{}) {
+ fmt.Fprint(&buf, sep)
+ sep = ", "
+ fmt.Fprint(&buf, key)
+ if values {
+ fmt.Fprintf(&buf, ": %q", value)
+ }
+ })
+ fmt.Fprint(&buf, "}")
+ return buf.String()
+}
+
+// String returns a string representation of the map's entries.
+// Values are printed using fmt.Sprintf("%v", v).
+// Order is unspecified.
+//
+func (m *Map) String() string {
+ return m.toString(true)
+}
+
+// KeysString returns a string representation of the map's key set.
+// Order is unspecified.
+//
+func (m *Map) KeysString() string {
+ return m.toString(false)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Hasher
+
+// A Hasher maps each type to its hash value.
+// For efficiency, a hasher uses memoization; thus its memory
+// footprint grows monotonically over time.
+// Hashers are not thread-safe.
+// Hashers have reference semantics.
+// Call MakeHasher to create a Hasher.
+type Hasher struct {
+ memo map[types.Type]uint32
+}
+
+// MakeHasher returns a new Hasher instance.
+func MakeHasher() Hasher {
+ return Hasher{make(map[types.Type]uint32)}
+}
+
+// Hash computes a hash value for the given type t such that
+// Identical(t, t') => Hash(t) == Hash(t').
+func (h Hasher) Hash(t types.Type) uint32 {
+ hash, ok := h.memo[t]
+ if !ok {
+ hash = h.hashFor(t)
+ h.memo[t] = hash
+ }
+ return hash
+}
+
+// hashString computes the Fowler–Noll–Vo (FNV-1a) hash of s.
+func hashString(s string) uint32 {
+ var h uint32
+ for i := 0; i < len(s); i++ {
+ h ^= uint32(s[i])
+ h *= 16777619
+ }
+ return h
+}
+
+// hashFor computes the hash of t.
+func (h Hasher) hashFor(t types.Type) uint32 {
+ // See Identical for rationale.
+ switch t := t.(type) {
+ case *types.Basic:
+ return uint32(t.Kind())
+
+ case *types.Array:
+ return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
+
+ case *types.Slice:
+ return 9049 + 2*h.Hash(t.Elem())
+
+ case *types.Struct:
+ var hash uint32 = 9059
+ for i, n := 0, t.NumFields(); i < n; i++ {
+ f := t.Field(i)
+ if f.Anonymous() {
+ hash += 8861
+ }
+ hash += hashString(t.Tag(i))
+ hash += hashString(f.Name()) // (ignore f.Pkg)
+ hash += h.Hash(f.Type())
+ }
+ return hash
+
+ case *types.Pointer:
+ return 9067 + 2*h.Hash(t.Elem())
+
+ case *types.Signature:
+ var hash uint32 = 9091
+ if t.Variadic() {
+ hash *= 8863
+ }
+ return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
+
+ case *types.Interface:
+ var hash uint32 = 9103
+ for i, n := 0, t.NumMethods(); i < n; i++ {
+ // See go/types.identicalMethods for rationale.
+ // Method order is not significant.
+ // Ignore m.Pkg().
+ m := t.Method(i)
+ hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
+ }
+ return hash
+
+ case *types.Map:
+ return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())
+
+ case *types.Chan:
+ return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
+
+ case *types.Named:
+ // Not safe with a copying GC; objects may move.
+ return uint32(reflect.ValueOf(t.Obj()).Pointer())
+
+ case *types.Tuple:
+ return h.hashTuple(t)
+ }
+ panic(t)
+}
+
+func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
+ // See go/types.identicalTypes for rationale.
+ n := tuple.Len()
+ var hash uint32 = 9137 + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ hash += 3 * h.Hash(tuple.At(i).Type())
+ }
+ return hash
+}
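
The semantics described above (key equivalence by types.Identical rather than ==, and a Hasher shared between Maps) can be seen in a small sketch; the variable names are illustrative:

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	hasher := typeutil.MakeHasher() // one hasher shared by both maps
	var a, b typeutil.Map
	a.SetHasher(hasher)
	b.SetHasher(hasher)

	// Two distinct Type values that are structurally identical.
	k1 := types.NewSlice(types.Typ[types.Int])
	k2 := types.NewSlice(types.Typ[types.Int])

	a.Set(k1, "hello")
	fmt.Println(a.At(k2)) // "hello": k2 is Identical to k1 even though k1 != k2
	fmt.Println(a.Len())  // 1

	b.Set(k2, 42)
	fmt.Println(b.KeysString()) // {[]int}
}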
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
new file mode 100644
index 00000000..32084610
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
@@ -0,0 +1,72 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements a cache of method sets.
+
+package typeutil
+
+import (
+ "go/types"
+ "sync"
+)
+
+// A MethodSetCache records the method set of each type T for which
+// MethodSet(T) is called so that repeat queries are fast.
+// The zero value is a ready-to-use cache instance.
+type MethodSetCache struct {
+ mu sync.Mutex
+ named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N
+ others map[types.Type]*types.MethodSet // all other types
+}
+
+// MethodSet returns the method set of type T. It is thread-safe.
+//
+// If cache is nil, this function is equivalent to types.NewMethodSet(T).
+// Utility functions can thus expose an optional *MethodSetCache
+// parameter to clients that care about performance.
+//
+func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet {
+ if cache == nil {
+ return types.NewMethodSet(T)
+ }
+ cache.mu.Lock()
+ defer cache.mu.Unlock()
+
+ switch T := T.(type) {
+ case *types.Named:
+ return cache.lookupNamed(T).value
+
+ case *types.Pointer:
+ if N, ok := T.Elem().(*types.Named); ok {
+ return cache.lookupNamed(N).pointer
+ }
+ }
+
+ // all other types
+ // (The map uses pointer equivalence, not type identity.)
+ mset := cache.others[T]
+ if mset == nil {
+ mset = types.NewMethodSet(T)
+ if cache.others == nil {
+ cache.others = make(map[types.Type]*types.MethodSet)
+ }
+ cache.others[T] = mset
+ }
+ return mset
+}
+
+func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } {
+ if cache.named == nil {
+ cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet })
+ }
+ // Avoid recomputing mset(*T) for each distinct Pointer
+ // instance whose underlying type is a named type.
+ msets, ok := cache.named[named]
+ if !ok {
+ msets.value = types.NewMethodSet(named)
+ msets.pointer = types.NewMethodSet(types.NewPointer(named))
+ cache.named[named] = msets
+ }
+ return msets
+}
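
A sketch of the intended usage pattern, one cache shared across many queries; the methodNames helper is illustrative:

package example

import (
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

var cache typeutil.MethodSetCache // zero value is ready to use

// methodNames returns the names in T's method set, computing the set once
// per type and serving repeat queries from the cache.
func methodNames(T types.Type) []string {
	mset := cache.MethodSet(T)
	names := make([]string, 0, mset.Len())
	for i := 0; i < mset.Len(); i++ {
		names = append(names, mset.At(i).Obj().Name())
	}
	return names
}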
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
new file mode 100644
index 00000000..9849c24c
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/types/typeutil/ui.go
@@ -0,0 +1,52 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+// This file defines utilities for user interfaces that display types.
+
+import "go/types"
+
+// IntuitiveMethodSet returns the intuitive method set of a type T,
+// which is the set of methods you can call on an addressable value of
+// that type.
+//
+// The result always contains MethodSet(T), and is exactly MethodSet(T)
+// for interface types and for pointer-to-concrete types.
+// For all other concrete types T, the result additionally
+// contains each method belonging to *T if there is no identically
+// named method on T itself.
+//
+// This corresponds to user intuition about method sets;
+// this function is intended only for user interfaces.
+//
+// The order of the result is as for types.NewMethodSet(T).
+//
+func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection {
+ isPointerToConcrete := func(T types.Type) bool {
+ ptr, ok := T.(*types.Pointer)
+ return ok && !types.IsInterface(ptr.Elem())
+ }
+
+ var result []*types.Selection
+ mset := msets.MethodSet(T)
+ if types.IsInterface(T) || isPointerToConcrete(T) {
+ for i, n := 0, mset.Len(); i < n; i++ {
+ result = append(result, mset.At(i))
+ }
+ } else {
+ // T is some other concrete type.
+ // Report methods of T and *T, preferring those of T.
+ pmset := msets.MethodSet(types.NewPointer(T))
+ for i, n := 0, pmset.Len(); i < n; i++ {
+ meth := pmset.At(i)
+ if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil {
+ meth = m
+ }
+ result = append(result, meth)
+ }
+
+ }
+ return result
+}
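
A usage sketch contrasting the intuitive method set with the spec method set of a type; the printMethods helper and its output format are illustrative:

package example

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

// printMethods lists the methods a user would expect to call on an
// addressable value of T; for a concrete non-pointer T this includes the
// pointer-receiver methods of *T, so it can be larger than the spec set.
func printMethods(T types.Type, cache *typeutil.MethodSetCache) {
	spec := cache.MethodSet(T)
	intuitive := typeutil.IntuitiveMethodSet(T, cache)
	fmt.Printf("%v: %d spec methods, %d intuitive methods\n", T, spec.Len(), len(intuitive))
	for _, sel := range intuitive {
		fmt.Println("  ", sel.Obj().Name(), sel.Type())
	}
}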