From c8ad4dbfd9511f4cfa748fa85c01fbca2edb348a Mon Sep 17 00:00:00 2001 From: Amine Hilaly Date: Sun, 21 Apr 2019 21:26:42 +0200 Subject: Add github iterator use `goto` in .Next* functions Update iterator.go --- bridge/github/import_query.go | 6 +- bridge/github/iterator.go | 406 +++++++++++++++++++++++++++++++++++++++++ bridge/github/iterator_test.go | 44 +++++ 3 files changed, 453 insertions(+), 3 deletions(-) create mode 100644 bridge/github/iterator.go create mode 100644 bridge/github/iterator_test.go diff --git a/bridge/github/import_query.go b/bridge/github/import_query.go index 59799f6a..4d5886f6 100644 --- a/bridge/github/import_query.go +++ b/bridge/github/import_query.go @@ -128,7 +128,7 @@ type issueTimelineQuery struct { Issues struct { Nodes []issueTimeline PageInfo pageInfo - } `graphql:"issues(first: $issueFirst, after: $issueAfter, orderBy: {field: CREATED_AT, direction: ASC})"` + } `graphql:"issues(first: $issueFirst, after: $issueAfter, orderBy: {field: CREATED_AT, direction: ASC}, filterBy: {since: $issueSince})"` } `graphql:"repository(owner: $owner, name: $name)"` } @@ -137,7 +137,7 @@ type issueEditQuery struct { Issues struct { Nodes []issueEdit PageInfo pageInfo - } `graphql:"issues(first: $issueFirst, after: $issueAfter, orderBy: {field: CREATED_AT, direction: ASC})"` + } `graphql:"issues(first: $issueFirst, after: $issueAfter, orderBy: {field: CREATED_AT, direction: ASC}, filterBy: {since: $issueSince})"` } `graphql:"repository(owner: $owner, name: $name)"` } @@ -156,7 +156,7 @@ type commentEditQuery struct { } } `graphql:"timeline(first: $timelineFirst, after: $timelineAfter)"` } - } `graphql:"issues(first: $issueFirst, after: $issueAfter, orderBy: {field: CREATED_AT, direction: ASC})"` + } `graphql:"issues(first: $issueFirst, after: $issueAfter, orderBy: {field: CREATED_AT, direction: ASC}, filterBy: {since: $issueSince})"` } `graphql:"repository(owner: $owner, name: $name)"` } diff --git a/bridge/github/iterator.go 
b/bridge/github/iterator.go new file mode 100644 index 00000000..cb7c9760 --- /dev/null +++ b/bridge/github/iterator.go @@ -0,0 +1,406 @@ +package github + +import ( + "context" + "time" + + "github.com/MichaelMure/git-bug/bridge/core" + "github.com/shurcooL/githubv4" +) + +/** +type iterator interface { + Count() int + Error() error + + NextIssue() bool + NextIssueEdit() bool + NextTimeline() bool + NextCommentEdit() bool + + IssueValue() issueTimeline + IssueEditValue() userContentEdit + TimelineValue() timelineItem + CommentEditValue() userContentEdit +} +*/ + +type indexer struct{ index int } + +type issueEditIterator struct { + index int + query issueEditQuery + variables map[string]interface{} +} + +type commentEditIterator struct { + index int + query commentEditQuery + variables map[string]interface{} +} + +type timelineIterator struct { + index int + query issueTimelineQuery + variables map[string]interface{} + + issueEdit indexer + commentEdit indexer +} + +type iterator struct { + // github graphql client + gc *githubv4.Client + + // if since is given the iterator will query only the updated + // and created issues after this date + since time.Time + + // number of timelines/userEditcontent/issueEdit to query + // at a time more capacity = more used memory = less queries + // to make + capacity int + + // sticky error + err error + + // count to keep track of the number of imported issues + count int + + // timeline iterator + timeline timelineIterator + + // issue edit iterator + issueEdit issueEditIterator + + // comment edit iterator + commentEdit commentEditIterator +} + +func newIterator(conf core.Configuration, since time.Time) *iterator { + return &iterator{ + since: since, + gc: buildClient(conf), + capacity: 8, + count: -1, + + timeline: timelineIterator{ + index: -1, + issueEdit: indexer{-1}, + commentEdit: indexer{-1}, + variables: map[string]interface{}{ + "owner": githubv4.String(conf["user"]), + "name": githubv4.String(conf["project"]), + 
}, + }, + commentEdit: commentEditIterator{ + index: -1, + variables: map[string]interface{}{ + "owner": githubv4.String(conf["user"]), + "name": githubv4.String(conf["project"]), + }, + }, + issueEdit: issueEditIterator{ + index: -1, + variables: map[string]interface{}{ + "owner": githubv4.String(conf["user"]), + "name": githubv4.String(conf["project"]), + }, + }, + } +} + +// init issue timeline variables +func (i *iterator) initTimelineQueryVariables() { + i.timeline.variables["issueFirst"] = githubv4.Int(1) + i.timeline.variables["issueAfter"] = (*githubv4.String)(nil) + i.timeline.variables["issueSince"] = githubv4.DateTime{Time: i.since} + i.timeline.variables["timelineFirst"] = githubv4.Int(i.capacity) + i.timeline.variables["timelineAfter"] = (*githubv4.String)(nil) + i.timeline.variables["issueEditLast"] = githubv4.Int(i.capacity) + i.timeline.variables["issueEditBefore"] = (*githubv4.String)(nil) + i.timeline.variables["commentEditLast"] = githubv4.Int(i.capacity) + i.timeline.variables["commentEditBefore"] = (*githubv4.String)(nil) +} + +// init issue edit variables +func (i *iterator) initIssueEditQueryVariables() { + i.issueEdit.variables["issueFirst"] = githubv4.Int(1) + i.issueEdit.variables["issueAfter"] = i.timeline.variables["issueAfter"] + i.issueEdit.variables["issueSince"] = githubv4.DateTime{Time: i.since} + i.issueEdit.variables["issueEditLast"] = githubv4.Int(i.capacity) + i.issueEdit.variables["issueEditBefore"] = (*githubv4.String)(nil) +} + +// init issue comment variables +func (i *iterator) initCommentEditQueryVariables() { + i.commentEdit.variables["issueFirst"] = githubv4.Int(1) + i.commentEdit.variables["issueAfter"] = i.timeline.variables["issueAfter"] + i.commentEdit.variables["issueSince"] = githubv4.DateTime{Time: i.since} + i.commentEdit.variables["timelineFirst"] = githubv4.Int(1) + i.commentEdit.variables["timelineAfter"] = (*githubv4.String)(nil) + i.commentEdit.variables["commentEditLast"] = githubv4.Int(i.capacity) + 
i.commentEdit.variables["commentEditBefore"] = (*githubv4.String)(nil) +} + +// reverse UserContentEdits arrays in both of the issue and +// comment timelines +func (i *iterator) reverseTimelineEditNodes() { + reverseEdits(i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) + for index, ce := range i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges { + if ce.Node.Typename == "IssueComment" && len(i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges) != 0 { + reverseEdits(i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[index].Node.IssueComment.UserContentEdits.Nodes) + } + } +} + +// Error . +func (i *iterator) Error() error { + return i.err +} + +// Count . +func (i *iterator) Count() int { + return i.count +} + +func (i *iterator) NextIssue() bool { + // we make the first move + if i.count == -1 { + + // init variables and goto queryIssue block + i.initTimelineQueryVariables() + goto queryIssue + } + + if i.err != nil { + return false + } + + if !i.timeline.query.Repository.Issues.PageInfo.HasNextPage { + return false + } + + // if we have more pages updates variables and query them + i.timeline.variables["timelineAfter"] = (*githubv4.String)(nil) + i.timeline.variables["issueAfter"] = i.timeline.query.Repository.Issues.PageInfo.EndCursor + i.timeline.index = -1 + + // query issue block +queryIssue: + if err := i.gc.Query(context.TODO(), &i.timeline.query, i.timeline.variables); err != nil { + i.err = err + return false + } + + if len(i.timeline.query.Repository.Issues.Nodes) == 0 { + return false + } + + i.reverseTimelineEditNodes() + i.count++ + return true +} + +func (i *iterator) IssueValue() issueTimeline { + return i.timeline.query.Repository.Issues.Nodes[0] +} + +func (i *iterator) NextTimeline() bool { + if i.err != nil { + return false + } + + if len(i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges) == 0 { + return false + } + + if i.timeline.index < min(i.capacity, 
len(i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges))-1 { + i.timeline.index++ + return true + } + + if !i.timeline.query.Repository.Issues.Nodes[0].Timeline.PageInfo.HasNextPage { + return false + } + + // more timelines, query them + i.timeline.variables["timelineAfter"] = i.timeline.query.Repository.Issues.Nodes[0].Timeline.PageInfo.EndCursor + if err := i.gc.Query(context.TODO(), &i.timeline.query, i.timeline.variables); err != nil { + i.err = err + return false + } + + i.reverseTimelineEditNodes() + i.timeline.index = 0 + return true +} + +func (i *iterator) TimelineValue() timelineItem { + return i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node +} + +func (i *iterator) timelineCursor() string { + return "" +} + +func (i *iterator) NextIssueEdit() bool { + if i.err != nil { + return false + } + + // this mean we looped over all available issue edits in the timeline. + // now we have to use i.issueEditQuery + if i.timeline.issueEdit.index == -2 { + if i.issueEdit.index < min(i.capacity, len(i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes))-1 { + i.issueEdit.index++ + return true + } + + if !i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.PageInfo.HasPreviousPage { + i.timeline.issueEdit.index = -1 + i.issueEdit.index = -1 + return false + } + + // if there is more edits, query them + i.issueEdit.variables["issueEditBefore"] = i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.PageInfo.StartCursor + goto queryIssueEdit + } + + // if there is no edits + if len(i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) == 0 { + return false + } + + // loop over them timeline comment edits + if i.timeline.issueEdit.index < min(i.capacity, len(i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes))-1 { + i.timeline.issueEdit.index++ + return true + } + + if !i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.PageInfo.HasPreviousPage { + 
i.timeline.issueEdit.index = -1 + return false + } + + // if there is more edits, query them + i.initIssueEditQueryVariables() + i.issueEdit.variables["issueEditBefore"] = i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.PageInfo.StartCursor + +queryIssueEdit: + if err := i.gc.Query(context.TODO(), &i.issueEdit.query, i.issueEdit.variables); err != nil { + i.err = err + //i.timeline.issueEdit.index = -1 + return false + } + + // reverse issue edits because github + reverseEdits(i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) + + // this is not supposed to happen + if len(i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) == 0 { + i.timeline.issueEdit.index = -1 + return false + } + + i.issueEdit.index = 0 + i.timeline.issueEdit.index = -2 + return true +} + +func (i *iterator) IssueEditValue() userContentEdit { + // if we are using issue edit query + if i.timeline.issueEdit.index == -2 { + return i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes[i.issueEdit.index] + } + + // else get it from timeline issue edit query + return i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes[i.timeline.issueEdit.index] +} + +func (i *iterator) NextCommentEdit() bool { + if i.err != nil { + return false + } + + // same as NextIssueEdit + if i.timeline.commentEdit.index == -2 { + + if i.commentEdit.index < min(i.capacity, len(i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.Nodes))-1 { + i.commentEdit.index++ + return true + } + + if !i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.PageInfo.HasPreviousPage { + i.timeline.commentEdit.index = -1 + i.commentEdit.index = -1 + return false + } + + // if there is more comment edits, query them + i.commentEdit.variables["commentEditBefore"] = i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.PageInfo.StartCursor + goto 
queryCommentEdit + } + + // if there is no comment edits + if len(i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node.IssueComment.UserContentEdits.Nodes) == 0 { + return false + } + + // loop over them timeline comment edits + if i.timeline.commentEdit.index < min(i.capacity, len(i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node.IssueComment.UserContentEdits.Nodes))-1 { + i.timeline.commentEdit.index++ + return true + } + + if !i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node.IssueComment.UserContentEdits.PageInfo.HasPreviousPage { + i.timeline.commentEdit.index = -1 + return false + } + + // if there is more comment edits, query them + + i.initCommentEditQueryVariables() + if i.timeline.index == 0 { + i.commentEdit.variables["timelineAfter"] = i.timeline.query.Repository.Issues.Nodes[0].Timeline.PageInfo.EndCursor + } else { + i.commentEdit.variables["timelineAfter"] = i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index-1].Cursor + } + + i.commentEdit.variables["commentEditBefore"] = i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node.IssueComment.UserContentEdits.PageInfo.StartCursor + +queryCommentEdit: + if err := i.gc.Query(context.TODO(), &i.commentEdit.query, i.commentEdit.variables); err != nil { + i.err = err + return false + } + + // this is not supposed to happen + if len(i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.Nodes) == 0 { + i.timeline.commentEdit.index = -1 + return false + } + + reverseEdits(i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.Nodes) + + i.commentEdit.index = 0 + i.timeline.commentEdit.index = -2 + return true +} + +func (i *iterator) CommentEditValue() userContentEdit { + if i.timeline.commentEdit.index == -2 { + return 
i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.Nodes[i.commentEdit.index] + } + + return i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node.IssueComment.UserContentEdits.Nodes[i.timeline.commentEdit.index] +} + +func min(a, b int) int { + if a > b { + return b + } + + return a +} diff --git a/bridge/github/iterator_test.go b/bridge/github/iterator_test.go new file mode 100644 index 00000000..c5820973 --- /dev/null +++ b/bridge/github/iterator_test.go @@ -0,0 +1,44 @@ +package github + +import ( + "fmt" + "os" + "testing" + "time" +) + +func Test_Iterator(t *testing.T) { + token := os.Getenv("GITHUB_TOKEN") + user := os.Getenv("GITHUB_USER") + project := os.Getenv("GITHUB_PROJECT") + + i := newIterator(map[string]string{ + keyToken: token, + "user": user, + "project": project, + }, time.Now().Add(-14*24*time.Hour)) + + for i.NextIssue() { + v := i.IssueValue() + fmt.Printf("issue = id:%v title:%v\n", v.Id, v.Title) + + for i.NextIssueEdit() { + v := i.IssueEditValue() + fmt.Printf("issue edit = %v\n", string(*v.Diff)) + } + + for i.NextTimeline() { + v := i.TimelineValue() + fmt.Printf("timeline = type:%v\n", v.Typename) + + if v.Typename == "IssueComment" { + for i.NextCommentEdit() { + _ = i.CommentEditValue() + + //fmt.Printf("comment edit: %v\n", *v.Diff) + fmt.Printf("comment edit\n") + } + } + } + } +} -- cgit From 3bcaa35b5d25ca9e12389ab4bf78600ae5df8af8 Mon Sep 17 00:00:00 2001 From: Amine Hilaly Date: Sat, 27 Apr 2019 00:15:02 +0100 Subject: Integrate iterator with importer --- bridge/github/config.go | 10 +- bridge/github/import.go | 594 ++++++++++++++--------------------------- bridge/github/iterator.go | 29 +- bridge/github/iterator_test.go | 10 +- 4 files changed, 222 insertions(+), 421 deletions(-) diff --git a/bridge/github/config.go b/bridge/github/config.go index b881c585..2a3119a6 100644 --- a/bridge/github/config.go +++ b/bridge/github/config.go @@ -20,10 +20,12 @@ 
import ( "golang.org/x/crypto/ssh/terminal" ) -const githubV3Url = "https://api.github.com" -const keyUser = "user" -const keyProject = "project" -const keyToken = "token" +const ( + githubV3Url = "https://api.github.com" + keyUser = "user" + keyProject = "project" + keyToken = "token" +) func (*Github) Configure(repo repository.RepoCommon) (core.Configuration, error) { conf := make(core.Configuration) diff --git a/bridge/github/import.go b/bridge/github/import.go index d641b192..74ccb776 100644 --- a/bridge/github/import.go +++ b/bridge/github/import.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "strings" + "time" "github.com/MichaelMure/git-bug/bridge/core" "github.com/MichaelMure/git-bug/bug" @@ -13,308 +14,250 @@ import ( "github.com/shurcooL/githubv4" ) -const keyGithubId = "github-id" -const keyGithubUrl = "github-url" -const keyGithubLogin = "github-login" +const ( + keyGithubId = "github-id" + keyGithubUrl = "github-url" + keyGithubLogin = "github-login" +) // githubImporter implement the Importer interface type githubImporter struct { - client *githubv4.Client - conf core.Configuration + iterator *iterator + conf core.Configuration } func (gi *githubImporter) Init(conf core.Configuration) error { - gi.conf = conf - gi.client = buildClient(conf) - - return nil -} + var since time.Time -func (gi *githubImporter) ImportAll(repo *cache.RepoCache) error { - q := &issueTimelineQuery{} - variables := map[string]interface{}{ - "owner": githubv4.String(gi.conf[keyUser]), - "name": githubv4.String(gi.conf[keyProject]), - "issueFirst": githubv4.Int(1), - "issueAfter": (*githubv4.String)(nil), - "timelineFirst": githubv4.Int(10), - "timelineAfter": (*githubv4.String)(nil), - - // Fun fact, github provide the comment edition in reverse chronological - // order, because haha. Look at me, I'm dying of laughter. 
- "issueEditLast": githubv4.Int(10), - "issueEditBefore": (*githubv4.String)(nil), - "commentEditLast": githubv4.Int(10), - "commentEditBefore": (*githubv4.String)(nil), - } - - var b *cache.BugCache - - for { - err := gi.client.Query(context.TODO(), &q, variables) + // parse since value from configuration + if value, ok := conf["since"]; ok && value != "" { + s, err := time.Parse(time.RFC3339, value) if err != nil { return err } - if len(q.Repository.Issues.Nodes) == 0 { - return nil - } - - issue := q.Repository.Issues.Nodes[0] - - if b == nil { - b, err = gi.ensureIssue(repo, issue, variables) - if err != nil { - return err - } - } - - for _, itemEdge := range q.Repository.Issues.Nodes[0].Timeline.Edges { - err = gi.ensureTimelineItem(repo, b, itemEdge.Cursor, itemEdge.Node, variables) - if err != nil { - return err - } - } - - if !issue.Timeline.PageInfo.HasNextPage { - err = b.CommitAsNeeded() - if err != nil { - return err - } - - b = nil - - if !q.Repository.Issues.PageInfo.HasNextPage { - break - } - - variables["issueAfter"] = githubv4.NewString(q.Repository.Issues.PageInfo.EndCursor) - variables["timelineAfter"] = (*githubv4.String)(nil) - continue - } - - variables["timelineAfter"] = githubv4.NewString(issue.Timeline.PageInfo.EndCursor) + since = s } + gi.iterator = newIterator(conf, since) return nil } -func (gi *githubImporter) Import(repo *cache.RepoCache, id string) error { - fmt.Println("IMPORT") - - return nil -} - -func (gi *githubImporter) ensureIssue(repo *cache.RepoCache, issue issueTimeline, rootVariables map[string]interface{}) (*cache.BugCache, error) { - fmt.Printf("import issue: %s\n", issue.Title) - - author, err := gi.ensurePerson(repo, issue.Author) - if err != nil { - return nil, err - } - - b, err := repo.ResolveBugCreateMetadata(keyGithubId, parseId(issue.Id)) - if err != nil && err != bug.ErrBugNotExist { - return nil, err - } - - // if there is no edit, the UserContentEdits given by github is empty. 
That - // means that the original message is given by the issue message. - // - // if there is edits, the UserContentEdits given by github contains both the - // original message and the following edits. The issue message give the last - // version so we don't care about that. - // - // the tricky part: for an issue older than the UserContentEdits API, github - // doesn't have the previous message version anymore and give an edition - // with .Diff == nil. We have to filter them. - - if len(issue.UserContentEdits.Nodes) == 0 { - if err == bug.ErrBugNotExist { - b, err = repo.NewBugRaw( - author, - issue.CreatedAt.Unix(), - // Todo: this might not be the initial title, we need to query the - // timeline to be sure - issue.Title, - cleanupText(string(issue.Body)), - nil, - map[string]string{ - keyGithubId: parseId(issue.Id), - keyGithubUrl: issue.Url.String(), - }, - ) - if err != nil { - return nil, err - } - } - - return b, nil - } - - // reverse the order, because github - reverseEdits(issue.UserContentEdits.Nodes) - - for i, edit := range issue.UserContentEdits.Nodes { - if b != nil && i == 0 { - // The first edit in the github result is the creation itself, we already have that - continue - } - - if b == nil { - if edit.Diff == nil { - // not enough data given by github for old edit, ignore them - continue - } - - // we create the bug as soon as we have a legit first edition - b, err = repo.NewBugRaw( - author, - issue.CreatedAt.Unix(), - // Todo: this might not be the initial title, we need to query the - // timeline to be sure - issue.Title, - cleanupText(string(*edit.Diff)), - nil, - map[string]string{ - keyGithubId: parseId(issue.Id), - keyGithubUrl: issue.Url.String(), - }, - ) - if err != nil { - return nil, err - } - continue - } +func (gi *githubImporter) ImportAll(repo *cache.RepoCache) error { + // Loop over all available issues + for gi.iterator.NextIssue() { + issue := gi.iterator.IssueValue() + fmt.Printf("importing issue: %v\n", issue.Title) - 
target, err := b.ResolveOperationWithMetadata(keyGithubId, parseId(issue.Id)) - if err != nil { - return nil, err - } + // In each iteration create a new bug + var b *cache.BugCache - err = gi.ensureCommentEdit(repo, b, target, edit) + // ensure issue author + author, err := gi.ensurePerson(repo, issue.Author) if err != nil { - return nil, err - } - } - - if !issue.UserContentEdits.PageInfo.HasNextPage { - // if we still didn't get a legit edit, create the bug from the issue data - if b == nil { - return repo.NewBugRaw( - author, - issue.CreatedAt.Unix(), - // Todo: this might not be the initial title, we need to query the - // timeline to be sure - issue.Title, - cleanupText(string(issue.Body)), - nil, - map[string]string{ - keyGithubId: parseId(issue.Id), - keyGithubUrl: issue.Url.String(), - }, - ) + return err } - return b, nil - } - // We have more edit, querying them - - q := &issueEditQuery{} - variables := map[string]interface{}{ - "owner": rootVariables["owner"], - "name": rootVariables["name"], - "issueFirst": rootVariables["issueFirst"], - "issueAfter": rootVariables["issueAfter"], - "issueEditLast": githubv4.Int(10), - "issueEditBefore": issue.UserContentEdits.PageInfo.StartCursor, - } - - for { - err := gi.client.Query(context.TODO(), &q, variables) - if err != nil { - return nil, err + // resolve bug + b, err = repo.ResolveBugCreateMetadata(keyGithubId, parseId(issue.Id)) + if err != nil && err != bug.ErrBugNotExist { + return err } - edits := q.Repository.Issues.Nodes[0].UserContentEdits - - if len(edits.Nodes) == 0 { - return b, nil + // get issue edits + issueEdits := []userContentEdit{} + for gi.iterator.NextIssueEdit() { + // append only edits with non empty diff + if issueEdit := gi.iterator.IssueEditValue(); issueEdit.Diff != nil { + issueEdits = append(issueEdits, issueEdit) + } } - for _, edit := range edits.Nodes { - if b == nil { - if edit.Diff == nil { - // not enough data given by github for old edit, ignore them - continue - } - - // we 
create the bug as soon as we have a legit first edition + // if issueEdits is empty + if len(issueEdits) == 0 { + if err == bug.ErrBugNotExist { + // create bug b, err = repo.NewBugRaw( author, issue.CreatedAt.Unix(), - // Todo: this might not be the initial title, we need to query the - // timeline to be sure issue.Title, - cleanupText(string(*edit.Diff)), + cleanupText(string(issue.Body)), nil, map[string]string{ keyGithubId: parseId(issue.Id), keyGithubUrl: issue.Url.String(), - }, - ) + }) if err != nil { - return nil, err + return err } - continue } + } else { + // create bug from given issueEdits + for _, edit := range issueEdits { + // if the bug doesn't exist + if b == nil { + // we create the bug as soon as we have a legit first edition + b, err = repo.NewBugRaw( + author, + issue.CreatedAt.Unix(), + issue.Title, + cleanupText(string(*edit.Diff)), + nil, + map[string]string{ + keyGithubId: parseId(issue.Id), + keyGithubUrl: issue.Url.String(), + }, + ) + + if err != nil { + return err + } - target, err := b.ResolveOperationWithMetadata(keyGithubId, parseId(issue.Id)) - if err != nil { - return nil, err + continue + } + + // other edits will be added as CommentEdit operations + + target, err := b.ResolveOperationWithMetadata(keyGithubId, parseId(issue.Id)) + if err != nil { + return err + } + + err = gi.ensureCommentEdit(repo, b, target, edit) + if err != nil { + return err + } } + } + + // check timeline items + for gi.iterator.NextTimeline() { + item := gi.iterator.TimelineValue() + + // if item is not a comment (label, unlabel, rename, close, open ...) 
+ if item.Typename != "IssueComment" { + if err := gi.ensureTimelineItem(repo, b, item); err != nil { + return err + } + } else { // if item is comment + + // ensure person + author, err := gi.ensurePerson(repo, item.IssueComment.Author) + if err != nil { + return err + } + + var target git.Hash + target, err = b.ResolveOperationWithMetadata(keyGithubId, parseId(item.IssueComment.Id)) + if err != nil && err != cache.ErrNoMatchingOp { + // real error + return err + } + + // collect all edits + commentEdits := []userContentEdit{} + for gi.iterator.NextCommentEdit() { + if commentEdit := gi.iterator.CommentEditValue(); commentEdit.Diff != nil { + commentEdits = append(commentEdits, commentEdit) + } + } + + // if no edits are given we create the comment + if len(commentEdits) == 0 { + + // if comment doesn't exist + if err == cache.ErrNoMatchingOp { + + // add comment operation + op, err := b.AddCommentRaw( + author, + item.IssueComment.CreatedAt.Unix(), + cleanupText(string(item.IssueComment.Body)), + nil, + map[string]string{ + keyGithubId: parseId(item.IssueComment.Id), + }, + ) + if err != nil { + return err + } + + // set hash + target, err = op.Hash() + if err != nil { + return err + } + } + } else { + // if we have some edits + for _, edit := range item.IssueComment.UserContentEdits.Nodes { + + // create comment when target is an empty string + if target == "" { + op, err := b.AddCommentRaw( + author, + item.IssueComment.CreatedAt.Unix(), + cleanupText(string(*edit.Diff)), + nil, + map[string]string{ + keyGithubId: parseId(item.IssueComment.Id), + keyGithubUrl: item.IssueComment.Url.String(), + }, + ) + if err != nil { + return err + } + + // set hash + target, err = op.Hash() + if err != nil { + return err + } + } + + err := gi.ensureCommentEdit(repo, b, target, edit) + if err != nil { + return err + } + + } + } - err = gi.ensureCommentEdit(repo, b, target, edit) - if err != nil { - return nil, err } + } - if !edits.PageInfo.HasNextPage { - break + if err := 
gi.iterator.Error(); err != nil { + fmt.Printf("error importing issue %v\n", issue.Id) + return err } - variables["issueEditBefore"] = edits.PageInfo.StartCursor + // commit bug state + err = b.CommitAsNeeded() + if err != nil { + return err + } } - // TODO: check + import files - - // if we still didn't get a legit edit, create the bug from the issue data - if b == nil { - return repo.NewBugRaw( - author, - issue.CreatedAt.Unix(), - // Todo: this might not be the initial title, we need to query the - // timeline to be sure - issue.Title, - cleanupText(string(issue.Body)), - nil, - map[string]string{ - keyGithubId: parseId(issue.Id), - keyGithubUrl: issue.Url.String(), - }, - ) + if err := gi.iterator.Error(); err != nil { + fmt.Printf("import error: %v\n", err) } - return b, nil + fmt.Printf("Successfully imported %v issues from Github\n", gi.iterator.Count()) + return nil } -func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.BugCache, cursor githubv4.String, item timelineItem, rootVariables map[string]interface{}) error { - fmt.Printf("import %s\n", item.Typename) +func (gi *githubImporter) Import(repo *cache.RepoCache, id string) error { + fmt.Println("IMPORT") + return nil +} + +func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.BugCache, item timelineItem) error { + fmt.Printf("import item: %s\n", item.Typename) switch item.Typename { case "IssueComment": - return gi.ensureComment(repo, b, cursor, item.IssueComment, rootVariables) + //return gi.ensureComment(repo, b, cursor, item.IssueComment, rootVariables) case "LabeledEvent": id := parseId(item.LabeledEvent.Id) @@ -411,162 +354,13 @@ func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.Bug return err default: - fmt.Println("ignore event ", item.Typename) + fmt.Printf("ignore event: %v\n", item.Typename) } return nil } -func (gi *githubImporter) ensureComment(repo *cache.RepoCache, b *cache.BugCache, cursor githubv4.String, comment 
issueComment, rootVariables map[string]interface{}) error { - author, err := gi.ensurePerson(repo, comment.Author) - if err != nil { - return err - } - - var target git.Hash - target, err = b.ResolveOperationWithMetadata(keyGithubId, parseId(comment.Id)) - if err != nil && err != cache.ErrNoMatchingOp { - // real error - return err - } - - // if there is no edit, the UserContentEdits given by github is empty. That - // means that the original message is given by the comment message. - // - // if there is edits, the UserContentEdits given by github contains both the - // original message and the following edits. The comment message give the last - // version so we don't care about that. - // - // the tricky part: for a comment older than the UserContentEdits API, github - // doesn't have the previous message version anymore and give an edition - // with .Diff == nil. We have to filter them. - - if len(comment.UserContentEdits.Nodes) == 0 { - if err == cache.ErrNoMatchingOp { - op, err := b.AddCommentRaw( - author, - comment.CreatedAt.Unix(), - cleanupText(string(comment.Body)), - nil, - map[string]string{ - keyGithubId: parseId(comment.Id), - }, - ) - if err != nil { - return err - } - - target, err = op.Hash() - if err != nil { - return err - } - } - - return nil - } - - // reverse the order, because github - reverseEdits(comment.UserContentEdits.Nodes) - - for i, edit := range comment.UserContentEdits.Nodes { - if target != "" && i == 0 { - // The first edit in the github result is the comment creation itself, we already have that - continue - } - - if target == "" { - if edit.Diff == nil { - // not enough data given by github for old edit, ignore them - continue - } - - op, err := b.AddCommentRaw( - author, - comment.CreatedAt.Unix(), - cleanupText(string(*edit.Diff)), - nil, - map[string]string{ - keyGithubId: parseId(comment.Id), - keyGithubUrl: comment.Url.String(), - }, - ) - if err != nil { - return err - } - - target, err = op.Hash() - if err != nil { - 
return err - } - } - - err := gi.ensureCommentEdit(repo, b, target, edit) - if err != nil { - return err - } - } - - if !comment.UserContentEdits.PageInfo.HasNextPage { - return nil - } - - // We have more edit, querying them - - q := &commentEditQuery{} - variables := map[string]interface{}{ - "owner": rootVariables["owner"], - "name": rootVariables["name"], - "issueFirst": rootVariables["issueFirst"], - "issueAfter": rootVariables["issueAfter"], - "timelineFirst": githubv4.Int(1), - "timelineAfter": cursor, - "commentEditLast": githubv4.Int(10), - "commentEditBefore": comment.UserContentEdits.PageInfo.StartCursor, - } - - for { - err := gi.client.Query(context.TODO(), &q, variables) - if err != nil { - return err - } - - edits := q.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits - - if len(edits.Nodes) == 0 { - return nil - } - - for i, edit := range edits.Nodes { - if i == 0 { - // The first edit in the github result is the creation itself, we already have that - continue - } - - err := gi.ensureCommentEdit(repo, b, target, edit) - if err != nil { - return err - } - } - - if !edits.PageInfo.HasNextPage { - break - } - - variables["commentEditBefore"] = edits.PageInfo.StartCursor - } - - // TODO: check + import files - - return nil -} - func (gi *githubImporter) ensureCommentEdit(repo *cache.RepoCache, b *cache.BugCache, target git.Hash, edit userContentEdit) error { - if edit.Diff == nil { - // this happen if the event is older than early 2018, Github doesn't have the data before that. - // Best we can do is to ignore the event. 
- return nil - } - _, err := b.ResolveOperationWithMetadata(keyGithubId, parseId(edit.Id)) if err == nil { // already imported @@ -670,7 +464,7 @@ func (gi *githubImporter) getGhost(repo *cache.RepoCache) (*cache.IdentityCache, "login": githubv4.String("ghost"), } - err = gi.client.Query(context.TODO(), &q, variables) + err = gi.iterator.gc.Query(context.TODO(), &q, variables) if err != nil { return nil, err } diff --git a/bridge/github/iterator.go b/bridge/github/iterator.go index cb7c9760..9e1ff30e 100644 --- a/bridge/github/iterator.go +++ b/bridge/github/iterator.go @@ -46,6 +46,8 @@ type timelineIterator struct { issueEdit indexer commentEdit indexer + + lastEndCursor githubv4.String // storing timeline end cursor for future use } type iterator struct { @@ -81,9 +83,8 @@ func newIterator(conf core.Configuration, since time.Time) *iterator { return &iterator{ since: since, gc: buildClient(conf), - capacity: 8, - count: -1, - + capacity: 10, + count: 0, timeline: timelineIterator{ index: -1, issueEdit: indexer{-1}, @@ -154,19 +155,20 @@ func (i *iterator) reverseTimelineEditNodes() { } } -// Error . +// Error return last encountered error func (i *iterator) Error() error { return i.err } -// Count . 
+// Count return number of issues we iterated over func (i *iterator) Count() int { return i.count } +// Next issue func (i *iterator) NextIssue() bool { // we make the first move - if i.count == -1 { + if i.count == 0 { // init variables and goto queryIssue block i.initTimelineQueryVariables() @@ -181,11 +183,14 @@ func (i *iterator) NextIssue() bool { return false } - // if we have more pages updates variables and query them + // if we have more issues, query them i.timeline.variables["timelineAfter"] = (*githubv4.String)(nil) i.timeline.variables["issueAfter"] = i.timeline.query.Repository.Issues.PageInfo.EndCursor i.timeline.index = -1 + // store cursor for future use + i.timeline.lastEndCursor = i.timeline.query.Repository.Issues.Nodes[0].Timeline.PageInfo.EndCursor + // query issue block queryIssue: if err := i.gc.Query(context.TODO(), &i.timeline.query, i.timeline.variables); err != nil { @@ -224,6 +229,8 @@ func (i *iterator) NextTimeline() bool { return false } + i.timeline.lastEndCursor = i.timeline.query.Repository.Issues.Nodes[0].Timeline.PageInfo.EndCursor + // more timelines, query them i.timeline.variables["timelineAfter"] = i.timeline.query.Repository.Issues.Nodes[0].Timeline.PageInfo.EndCursor if err := i.gc.Query(context.TODO(), &i.timeline.query, i.timeline.variables); err != nil { @@ -240,10 +247,6 @@ func (i *iterator) TimelineValue() timelineItem { return i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node } -func (i *iterator) timelineCursor() string { - return "" -} - func (i *iterator) NextIssueEdit() bool { if i.err != nil { return false @@ -359,11 +362,9 @@ func (i *iterator) NextCommentEdit() bool { return false } - // if there is more comment edits, query them - i.initCommentEditQueryVariables() if i.timeline.index == 0 { - i.commentEdit.variables["timelineAfter"] = i.timeline.query.Repository.Issues.Nodes[0].Timeline.PageInfo.EndCursor + i.commentEdit.variables["timelineAfter"] = i.timeline.lastEndCursor } 
else { i.commentEdit.variables["timelineAfter"] = i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index-1].Cursor } diff --git a/bridge/github/iterator_test.go b/bridge/github/iterator_test.go index c5820973..c5fad349 100644 --- a/bridge/github/iterator_test.go +++ b/bridge/github/iterator_test.go @@ -16,11 +16,12 @@ func Test_Iterator(t *testing.T) { keyToken: token, "user": user, "project": project, - }, time.Now().Add(-14*24*time.Hour)) + }, time.Time{}) + //time.Now().Add(-14*24*time.Hour)) for i.NextIssue() { v := i.IssueValue() - fmt.Printf("issue = id:%v title:%v\n", v.Id, v.Title) + fmt.Printf(" issue = id:%v title:%v\n", v.Id, v.Title) for i.NextIssueEdit() { v := i.IssueEditValue() @@ -33,12 +34,15 @@ func Test_Iterator(t *testing.T) { if v.Typename == "IssueComment" { for i.NextCommentEdit() { + _ = i.CommentEditValue() - //fmt.Printf("comment edit: %v\n", *v.Diff) fmt.Printf("comment edit\n") } } } } + + fmt.Println(i.Error()) + fmt.Println(i.Count()) } -- cgit From 0d976f66e87b7c053b10d50fe0849f6c8e5412e6 Mon Sep 17 00:00:00 2001 From: Amine Hilaly Date: Fri, 3 May 2019 00:02:50 +0200 Subject: Add importer tests Changes to Importer and exporter interface Improve importer Fix extra edits bug --- bridge/core/bridge.go | 47 +----- bridge/core/interfaces.go | 8 +- bridge/github/import.go | 376 +++++++++++++++++++++-------------------- bridge/github/import_test.go | 223 ++++++++++++++++++++++++ bridge/github/iterator.go | 25 +-- bridge/github/iterator_test.go | 48 ------ bridge/launchpad/import.go | 7 +- commands/bridge_pull.go | 5 +- 8 files changed, 434 insertions(+), 305 deletions(-) create mode 100644 bridge/github/import_test.go delete mode 100644 bridge/github/iterator_test.go diff --git a/bridge/core/bridge.go b/bridge/core/bridge.go index b849bec6..aa02ceb5 100644 --- a/bridge/core/bridge.go +++ b/bridge/core/bridge.go @@ -6,6 +6,7 @@ import ( "reflect" "regexp" "strings" + "time" "github.com/MichaelMure/git-bug/cache" 
"github.com/MichaelMure/git-bug/repository" @@ -265,7 +266,7 @@ func (b *Bridge) ensureInit() error { return nil } -func (b *Bridge) ImportAll() error { +func (b *Bridge) ImportAll(since time.Time) error { importer := b.getImporter() if importer == nil { return ErrImportNotSupported @@ -281,48 +282,10 @@ func (b *Bridge) ImportAll() error { return err } - return importer.ImportAll(b.repo) + return importer.ImportAll(b.repo, since) } -func (b *Bridge) Import(id string) error { - importer := b.getImporter() - if importer == nil { - return ErrImportNotSupported - } - - err := b.ensureConfig() - if err != nil { - return err - } - - err = b.ensureInit() - if err != nil { - return err - } - - return importer.Import(b.repo, id) -} - -func (b *Bridge) ExportAll() error { - exporter := b.getExporter() - if exporter == nil { - return ErrExportNotSupported - } - - err := b.ensureConfig() - if err != nil { - return err - } - - err = b.ensureInit() - if err != nil { - return err - } - - return exporter.ExportAll(b.repo) -} - -func (b *Bridge) Export(id string) error { +func (b *Bridge) ExportAll(since time.Time) error { exporter := b.getExporter() if exporter == nil { return ErrExportNotSupported @@ -338,5 +301,5 @@ func (b *Bridge) Export(id string) error { return err } - return exporter.Export(b.repo, id) + return exporter.ExportAll(b.repo, since) } diff --git a/bridge/core/interfaces.go b/bridge/core/interfaces.go index 4836dab3..be5afa62 100644 --- a/bridge/core/interfaces.go +++ b/bridge/core/interfaces.go @@ -1,6 +1,8 @@ package core import ( + "time" + "github.com/MichaelMure/git-bug/cache" "github.com/MichaelMure/git-bug/repository" ) @@ -27,12 +29,10 @@ type BridgeImpl interface { type Importer interface { Init(conf Configuration) error - ImportAll(repo *cache.RepoCache) error - Import(repo *cache.RepoCache, id string) error + ImportAll(repo *cache.RepoCache, since time.Time) error } type Exporter interface { Init(conf Configuration) error - ExportAll(repo 
*cache.RepoCache) error - Export(repo *cache.RepoCache, id string) error + ExportAll(repo *cache.RepoCache, since time.Time) error } diff --git a/bridge/github/import.go b/bridge/github/import.go index 74ccb776..4960117a 100644 --- a/bridge/github/import.go +++ b/bridge/github/import.go @@ -27,237 +27,157 @@ type githubImporter struct { } func (gi *githubImporter) Init(conf core.Configuration) error { - var since time.Time - - // parse since value from configuration - if value, ok := conf["since"]; ok && value != "" { - s, err := time.Parse(time.RFC3339, value) - if err != nil { - return err - } - - since = s - } - - gi.iterator = newIterator(conf, since) + gi.conf = conf + gi.iterator = newIterator(conf) return nil } -func (gi *githubImporter) ImportAll(repo *cache.RepoCache) error { - // Loop over all available issues +// ImportAll . +func (gi *githubImporter) ImportAll(repo *cache.RepoCache, since time.Time) error { + gi.iterator.since = since + + // Loop over all matching issues for gi.iterator.NextIssue() { issue := gi.iterator.IssueValue() - fmt.Printf("importing issue: %v\n", issue.Title) - - // In each iteration create a new bug - var b *cache.BugCache - - // ensure issue author - author, err := gi.ensurePerson(repo, issue.Author) - if err != nil { - return err - } - - // resolve bug - b, err = repo.ResolveBugCreateMetadata(keyGithubId, parseId(issue.Id)) - if err != nil && err != bug.ErrBugNotExist { - return err - } + fmt.Printf("importing issue: %v\n", gi.iterator.count) // get issue edits issueEdits := []userContentEdit{} for gi.iterator.NextIssueEdit() { - // append only edits with non empty diff - if issueEdit := gi.iterator.IssueEditValue(); issueEdit.Diff != nil { + if issueEdit := gi.iterator.IssueEditValue(); issueEdit.Diff != nil && string(*issueEdit.Diff) != "" { issueEdits = append(issueEdits, issueEdit) } } - // if issueEdits is empty - if len(issueEdits) == 0 { - if err == bug.ErrBugNotExist { - // create bug - b, err = repo.NewBugRaw( - 
author, - issue.CreatedAt.Unix(), - issue.Title, - cleanupText(string(issue.Body)), - nil, - map[string]string{ - keyGithubId: parseId(issue.Id), - keyGithubUrl: issue.Url.String(), - }) - if err != nil { - return err - } - } - } else { - // create bug from given issueEdits - for _, edit := range issueEdits { - // if the bug doesn't exist - if b == nil { - // we create the bug as soon as we have a legit first edition - b, err = repo.NewBugRaw( - author, - issue.CreatedAt.Unix(), - issue.Title, - cleanupText(string(*edit.Diff)), - nil, - map[string]string{ - keyGithubId: parseId(issue.Id), - keyGithubUrl: issue.Url.String(), - }, - ) - - if err != nil { - return err - } - - continue - } - - // other edits will be added as CommentEdit operations - - target, err := b.ResolveOperationWithMetadata(keyGithubId, parseId(issue.Id)) - if err != nil { - return err - } - - err = gi.ensureCommentEdit(repo, b, target, edit) - if err != nil { - return err - } - } + // create issue + b, err := gi.ensureIssue(repo, issue, issueEdits) + if err != nil { + return fmt.Errorf("issue creation: %v", err) } - // check timeline items + // loop over timeline items for gi.iterator.NextTimeline() { item := gi.iterator.TimelineValue() - // if item is not a comment (label, unlabel, rename, close, open ...) 
- if item.Typename != "IssueComment" { - if err := gi.ensureTimelineItem(repo, b, item); err != nil { - return err - } - } else { // if item is comment - - // ensure person - author, err := gi.ensurePerson(repo, item.IssueComment.Author) - if err != nil { - return err - } - - var target git.Hash - target, err = b.ResolveOperationWithMetadata(keyGithubId, parseId(item.IssueComment.Id)) - if err != nil && err != cache.ErrNoMatchingOp { - // real error - return err - } - + // if item is comment + if item.Typename == "IssueComment" { // collect all edits commentEdits := []userContentEdit{} for gi.iterator.NextCommentEdit() { - if commentEdit := gi.iterator.CommentEditValue(); commentEdit.Diff != nil { + if commentEdit := gi.iterator.CommentEditValue(); commentEdit.Diff != nil && string(*commentEdit.Diff) != "" { commentEdits = append(commentEdits, commentEdit) } } - // if no edits are given we create the comment - if len(commentEdits) == 0 { - - // if comment doesn't exist - if err == cache.ErrNoMatchingOp { - - // add comment operation - op, err := b.AddCommentRaw( - author, - item.IssueComment.CreatedAt.Unix(), - cleanupText(string(item.IssueComment.Body)), - nil, - map[string]string{ - keyGithubId: parseId(item.IssueComment.Id), - }, - ) - if err != nil { - return err - } - - // set hash - target, err = op.Hash() - if err != nil { - return err - } - } - } else { - // if we have some edits - for _, edit := range item.IssueComment.UserContentEdits.Nodes { - - // create comment when target is an empty string - if target == "" { - op, err := b.AddCommentRaw( - author, - item.IssueComment.CreatedAt.Unix(), - cleanupText(string(*edit.Diff)), - nil, - map[string]string{ - keyGithubId: parseId(item.IssueComment.Id), - keyGithubUrl: item.IssueComment.Url.String(), - }, - ) - if err != nil { - return err - } - - // set hash - target, err = op.Hash() - if err != nil { - return err - } - } - - err := gi.ensureCommentEdit(repo, b, target, edit) - if err != nil { - return err - } 
- - } + err := gi.ensureTimelineComment(repo, b, item.IssueComment, commentEdits) + if err != nil { + return fmt.Errorf("timeline event creation: %v", err) } + } else { + if err := gi.ensureTimelineItem(repo, b, item); err != nil { + return fmt.Errorf("timeline comment creation: %v", err) + } } - - } - - if err := gi.iterator.Error(); err != nil { - fmt.Printf("error importing issue %v\n", issue.Id) - return err } // commit bug state - err = b.CommitAsNeeded() - if err != nil { - return err + if err := b.CommitAsNeeded(); err != nil { + return fmt.Errorf("bug commit: %v", err) } } if err := gi.iterator.Error(); err != nil { fmt.Printf("import error: %v\n", err) + return err } fmt.Printf("Successfully imported %v issues from Github\n", gi.iterator.Count()) return nil } -func (gi *githubImporter) Import(repo *cache.RepoCache, id string) error { - fmt.Println("IMPORT") - return nil +func (gi *githubImporter) ensureIssue(repo *cache.RepoCache, issue issueTimeline, issueEdits []userContentEdit) (*cache.BugCache, error) { + // ensure issue author + author, err := gi.ensurePerson(repo, issue.Author) + if err != nil { + return nil, err + } + + // resolve bug + b, err := repo.ResolveBugCreateMetadata(keyGithubUrl, issue.Url.String()) + if err != nil && err != bug.ErrBugNotExist { + return nil, err + } + + // if issueEdits is empty + if len(issueEdits) == 0 { + if err == bug.ErrBugNotExist { + // create bug + b, err = repo.NewBugRaw( + author, + issue.CreatedAt.Unix(), + issue.Title, + cleanupText(string(issue.Body)), + nil, + map[string]string{ + keyGithubId: parseId(issue.Id), + keyGithubUrl: issue.Url.String(), + }) + if err != nil { + return nil, err + } + } + + } else { + // create bug from given issueEdits + for i, edit := range issueEdits { + if i == 0 && b != nil { + continue + } + + // if the bug doesn't exist + if b == nil { + // we create the bug as soon as we have a legit first edition + b, err = repo.NewBugRaw( + author, + issue.CreatedAt.Unix(), + issue.Title, 
+ cleanupText(string(*edit.Diff)), + nil, + map[string]string{ + keyGithubId: parseId(issue.Id), + keyGithubUrl: issue.Url.String(), + }, + ) + + if err != nil { + return nil, err + } + + continue + } + + // other edits will be added as CommentEdit operations + target, err := b.ResolveOperationWithMetadata(keyGithubUrl, issue.Url.String()) + if err != nil { + return nil, err + } + + err = gi.ensureCommentEdit(repo, b, target, edit) + if err != nil { + return nil, err + } + } + } + + return b, nil } func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.BugCache, item timelineItem) error { - fmt.Printf("import item: %s\n", item.Typename) + fmt.Printf("import event item: %s\n", item.Typename) switch item.Typename { case "IssueComment": - //return gi.ensureComment(repo, b, cursor, item.IssueComment, rootVariables) case "LabeledEvent": id := parseId(item.LabeledEvent.Id) @@ -290,6 +210,7 @@ func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.Bug if err != nil { return err } + _, _, err = b.ChangeLabelsRaw( author, item.UnlabeledEvent.CreatedAt.Unix(), @@ -360,6 +281,92 @@ func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.Bug return nil } +func (gi *githubImporter) ensureTimelineComment(repo *cache.RepoCache, b *cache.BugCache, item issueComment, edits []userContentEdit) error { + // ensure person + author, err := gi.ensurePerson(repo, item.Author) + if err != nil { + return err + } + + var target git.Hash + target, err = b.ResolveOperationWithMetadata(keyGithubId, parseId(item.Id)) + if err != nil && err != cache.ErrNoMatchingOp { + // real error + return err + } + // if no edits are given we create the comment + if len(edits) == 0 { + + // if comment doesn't exist + if err == cache.ErrNoMatchingOp { + + // add comment operation + op, err := b.AddCommentRaw( + author, + item.CreatedAt.Unix(), + cleanupText(string(item.Body)), + nil, + map[string]string{ + keyGithubId: parseId(item.Id), + 
keyGithubUrl: parseId(item.Url.String()), + }, + ) + if err != nil { + return err + } + + // set hash + target, err = op.Hash() + if err != nil { + return err + } + } + } else { + for i, edit := range item.UserContentEdits.Nodes { + if i == 0 && target != "" { + continue + } + + // ensure editor identity + editor, err := gi.ensurePerson(repo, edit.Editor) + if err != nil { + return err + } + + // create comment when target is empty + if target == "" { + op, err := b.AddCommentRaw( + editor, + edit.CreatedAt.Unix(), + cleanupText(string(*edit.Diff)), + nil, + map[string]string{ + keyGithubId: parseId(item.Id), + keyGithubUrl: item.Url.String(), + }, + ) + if err != nil { + return err + } + + // set hash + target, err = op.Hash() + if err != nil { + return err + } + + continue + } + + err = gi.ensureCommentEdit(repo, b, target, edit) + if err != nil { + return err + } + } + } + return nil +} + func (gi *githubImporter) ensureCommentEdit(repo *cache.RepoCache, b *cache.BugCache, target git.Hash, edit userContentEdit) error { _, err := b.ResolveOperationWithMetadata(keyGithubId, parseId(edit.Id)) if err == nil { @@ -381,8 +388,10 @@ func (gi *githubImporter) ensureCommentEdit(repo *cache.RepoCache, b *cache.BugC switch { case edit.DeletedAt != nil: // comment deletion, not supported yet + fmt.Println("comment deletion ....") case edit.DeletedAt == nil: + // comment edition _, err := b.EditCommentRaw( editor, @@ -393,6 +402,7 @@ func (gi *githubImporter) ensureCommentEdit(repo *cache.RepoCache, b *cache.BugC keyGithubId: parseId(edit.Id), }, ) + if err != nil { return err } diff --git a/bridge/github/import_test.go b/bridge/github/import_test.go new file mode 100644 index 00000000..d64f0b4b --- /dev/null +++ b/bridge/github/import_test.go @@ -0,0 +1,223 @@ +package github + +import ( + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/MichaelMure/git-bug/bridge/core" + "github.com/MichaelMure/git-bug/bug" + 
"github.com/MichaelMure/git-bug/cache" + "github.com/MichaelMure/git-bug/identity" + "github.com/MichaelMure/git-bug/repository" + "github.com/MichaelMure/git-bug/util/interrupt" +) + +func Test_Importer(t *testing.T) { + author := identity.NewIdentity("Michael Muré", "batolettre@gmail.com") + tests := []struct { + name string + exist bool + url string + bug *bug.Snapshot + }{ + { + name: "simple issue", + exist: true, + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/1", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "simple issue", "initial comment", nil), + bug.NewAddCommentOp(author, 0, "first comment", nil), + bug.NewAddCommentOp(author, 0, "second comment", nil)}, + }, + }, + { + name: "empty issue", + exist: true, + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/2", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "empty issue", "", nil), + }, + }, + }, + { + name: "complex issue", + exist: true, + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/3", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "complex issue", "initial comment", nil), + bug.NewLabelChangeOperation(author, 0, []bug.Label{"bug"}, []bug.Label{}), + bug.NewLabelChangeOperation(author, 0, []bug.Label{"duplicate"}, []bug.Label{}), + bug.NewLabelChangeOperation(author, 0, []bug.Label{}, []bug.Label{"duplicate"}), + bug.NewAddCommentOp(author, 0, "### header\n\n**bold**\n\n_italic_\n\n> with quote\n\n`inline code`\n\n```\nmultiline code\n```\n\n- bulleted\n- list\n\n1. numbered\n1. 
list\n\n- [ ] task\n- [x] list\n\n@MichaelMure mention\n\n#2 reference issue\n#3 auto-reference issue\n\n![image](https://user-images.githubusercontent.com/294669/56870222-811faf80-6a0c-11e9-8f2c-f0beb686303f.png)", nil), + bug.NewSetTitleOp(author, 0, "complex issue edited", "complex issue"), + bug.NewSetTitleOp(author, 0, "complex issue", "complex issue edited"), + bug.NewSetStatusOp(author, 0, bug.ClosedStatus), + bug.NewSetStatusOp(author, 0, bug.OpenStatus), + }, + }, + }, + { + name: "editions", + exist: true, + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/4", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "editions", "initial comment edited", nil), + bug.NewEditCommentOp(author, 0, "", "erased then edited again", nil), + bug.NewAddCommentOp(author, 0, "first comment", nil), + bug.NewEditCommentOp(author, 0, "", "first comment edited", nil), + }, + }, + }, + { + name: "comment deletion", + exist: true, + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/5", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "comment deletion", "", nil), + }, + }, + }, + { + name: "edition deletion", + exist: true, + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/6", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "edition deletion", "initial comment", nil), + bug.NewEditCommentOp(author, 0, "", "initial comment edited again", nil), + bug.NewAddCommentOp(author, 0, "first comment", nil), + bug.NewEditCommentOp(author, 0, "", "first comment edited again", nil), + }, + }, + }, + { + name: "hidden comment", + exist: true, + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/7", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "hidden comment", "initial comment", nil), + bug.NewAddCommentOp(author, 0, "first comment", nil), + }, + }, + }, + { + name: 
"transfered issue", + exist: true, + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/8", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "transfered issue", "", nil), + }, + }, + }, + } + + cwd, err := os.Getwd() + if err != nil { + t.Fatal(err) + } + + repo, err := repository.NewGitRepo(cwd, bug.Witnesser) + if err != nil { + t.Fatal(err) + } + + backend, err := cache.NewRepoCache(repo) + if err != nil { + t.Fatal(err) + } + + defer backend.Close() + interrupt.RegisterCleaner(backend.Close) + + importer := &githubImporter{} + err = importer.Init(core.Configuration{ + "user": "MichaelMure", + "project": "git-but-test-github-bridge", + "token": os.Getenv("GITHUB_TOKEN"), + }) + if err != nil { + t.Fatal(err) + } + + err = importer.ImportAll(backend, time.Time{}) + if err != nil { + t.Fatal(err) + } + + ids := backend.AllBugsIds() + assert.Equal(t, len(ids), 8) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + b, err := backend.ResolveBugCreateMetadata(keyGithubUrl, tt.url) + if err != nil { + t.Fatal(err) + } + + ops := b.Snapshot().Operations + if tt.exist { + assert.Equal(t, len(tt.bug.Operations), len(b.Snapshot().Operations)) + + for i, op := range tt.bug.Operations { + switch op.(type) { + case *bug.CreateOperation: + if op2, ok := ops[i].(*bug.CreateOperation); ok { + assert.Equal(t, op2.Title, op.(*bug.CreateOperation).Title) + assert.Equal(t, op2.Message, op.(*bug.CreateOperation).Message) + continue + } + t.Errorf("bad operation type index = %d expected = CreationOperation", i) + case *bug.SetStatusOperation: + if op2, ok := ops[i].(*bug.SetStatusOperation); ok { + assert.Equal(t, op2.Status, op.(*bug.SetStatusOperation).Status) + continue + } + t.Errorf("bad operation type index = %d expected = SetStatusOperation", i) + case *bug.SetTitleOperation: + if op2, ok := ops[i].(*bug.SetTitleOperation); ok { + assert.Equal(t, op.(*bug.SetTitleOperation).Was, op2.Was) + assert.Equal(t, 
op.(*bug.SetTitleOperation).Title, op2.Title) + continue + } + t.Errorf("bad operation type index = %d expected = SetTitleOperation", i) + case *bug.LabelChangeOperation: + if op2, ok := ops[i].(*bug.LabelChangeOperation); ok { + assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Added, op2.Added) + assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Removed, op2.Removed) + continue + } + t.Errorf("bad operation type index = %d expected = ChangeLabelOperation", i) + case *bug.AddCommentOperation: + if op2, ok := ops[i].(*bug.AddCommentOperation); ok { + assert.Equal(t, op.(*bug.AddCommentOperation).Message, op2.Message) + continue + } + t.Errorf("bad operation type index = %d expected = AddCommentOperation", i) + case *bug.EditCommentOperation: + if op2, ok := ops[i].(*bug.EditCommentOperation); ok { + assert.Equal(t, op.(*bug.EditCommentOperation).Message, op2.Message) + continue + } + t.Errorf("bad operation type index = %d expected = EditCommentOperation", i) + default: + + } + } + + } else { + assert.Equal(t, b, nil) + } + }) + } + +} diff --git a/bridge/github/iterator.go b/bridge/github/iterator.go index 9e1ff30e..281f8a6b 100644 --- a/bridge/github/iterator.go +++ b/bridge/github/iterator.go @@ -8,23 +8,6 @@ import ( "github.com/shurcooL/githubv4" ) -/** -type iterator interface { - Count() int - Error() error - - NextIssue() bool - NextIssueEdit() bool - NextTimeline() bool - NextCommentEdit() bool - - IssueValue() issueTimeline - IssueEditValue() userContentEdit - TimelineValue() timelineItem - CommentEditValue() userContentEdit -} -*/ - type indexer struct{ index int } type issueEditIterator struct { @@ -47,7 +30,8 @@ type timelineIterator struct { issueEdit indexer commentEdit indexer - lastEndCursor githubv4.String // storing timeline end cursor for future use + // lastEndCursor cache the timeline end cursor for one iteration + lastEndCursor githubv4.String } type iterator struct { @@ -59,7 +43,7 @@ type iterator struct { since time.Time // 
number of timelines/userEditcontent/issueEdit to query - // at a time more capacity = more used memory = less queries + // at a time, more capacity = more used memory = less queries // to make capacity int @@ -79,9 +63,8 @@ type iterator struct { commentEdit commentEditIterator } -func newIterator(conf core.Configuration, since time.Time) *iterator { +func newIterator(conf core.Configuration) *iterator { return &iterator{ - since: since, gc: buildClient(conf), capacity: 10, count: 0, diff --git a/bridge/github/iterator_test.go b/bridge/github/iterator_test.go deleted file mode 100644 index c5fad349..00000000 --- a/bridge/github/iterator_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package github - -import ( - "fmt" - "os" - "testing" - "time" -) - -func Test_Iterator(t *testing.T) { - token := os.Getenv("GITHUB_TOKEN") - user := os.Getenv("GITHUB_USER") - project := os.Getenv("GITHUB_PROJECT") - - i := newIterator(map[string]string{ - keyToken: token, - "user": user, - "project": project, - }, time.Time{}) - //time.Now().Add(-14*24*time.Hour)) - - for i.NextIssue() { - v := i.IssueValue() - fmt.Printf(" issue = id:%v title:%v\n", v.Id, v.Title) - - for i.NextIssueEdit() { - v := i.IssueEditValue() - fmt.Printf("issue edit = %v\n", string(*v.Diff)) - } - - for i.NextTimeline() { - v := i.TimelineValue() - fmt.Printf("timeline = type:%v\n", v.Typename) - - if v.Typename == "IssueComment" { - for i.NextCommentEdit() { - - _ = i.CommentEditValue() - - fmt.Printf("comment edit\n") - } - } - } - } - - fmt.Println(i.Error()) - fmt.Println(i.Count()) -} diff --git a/bridge/launchpad/import.go b/bridge/launchpad/import.go index 30ec5c3f..177ff3fc 100644 --- a/bridge/launchpad/import.go +++ b/bridge/launchpad/import.go @@ -44,7 +44,7 @@ func (li *launchpadImporter) ensurePerson(repo *cache.RepoCache, owner LPPerson) ) } -func (li *launchpadImporter) ImportAll(repo *cache.RepoCache) error { +func (li *launchpadImporter) ImportAll(repo *cache.RepoCache, since time.Time) error { 
lpAPI := new(launchpadAPI) err := lpAPI.Init() @@ -139,8 +139,3 @@ func (li *launchpadImporter) ImportAll(repo *cache.RepoCache) error { } return nil } - -func (li *launchpadImporter) Import(repo *cache.RepoCache, id string) error { - fmt.Println("IMPORT") - return nil -} diff --git a/commands/bridge_pull.go b/commands/bridge_pull.go index 9b251479..f9958882 100644 --- a/commands/bridge_pull.go +++ b/commands/bridge_pull.go @@ -1,6 +1,8 @@ package commands import ( + "time" + "github.com/MichaelMure/git-bug/bridge" "github.com/MichaelMure/git-bug/bridge/core" "github.com/MichaelMure/git-bug/cache" @@ -28,7 +30,8 @@ func runBridgePull(cmd *cobra.Command, args []string) error { return err } - err = b.ImportAll() + // TODO: by default import only new events + err = b.ImportAll(time.Time{}) if err != nil { return err } -- cgit From f7ea3421caa2c8957a82454255c4fdd699b70a9c Mon Sep 17 00:00:00 2001 From: Amine Hilaly Date: Sat, 4 May 2019 13:19:56 +0200 Subject: Add ForceLabelChange functionalities --- bridge/github/import.go | 11 ++++++----- bug/op_label_change.go | 27 +++++++++++++++++++++++++++ cache/bug_cache.go | 27 +++++++++++++++++++++++++++ 3 files changed, 60 insertions(+), 5 deletions(-) diff --git a/bridge/github/import.go b/bridge/github/import.go index 4960117a..5f99b5ea 100644 --- a/bridge/github/import.go +++ b/bridge/github/import.go @@ -40,7 +40,7 @@ func (gi *githubImporter) ImportAll(repo *cache.RepoCache, since time.Time) erro for gi.iterator.NextIssue() { issue := gi.iterator.IssueValue() - fmt.Printf("importing issue: %v\n", gi.iterator.count) + fmt.Printf("importing issue: %v %v\n", gi.iterator.count, issue.Title) // get issue edits issueEdits := []userContentEdit{} for gi.iterator.NextIssueEdit() { @@ -71,12 +71,12 @@ func (gi *githubImporter) ImportAll(repo *cache.RepoCache, since time.Time) erro err := gi.ensureTimelineComment(repo, b, item.IssueComment, commentEdits) if err != nil { - return fmt.Errorf("timeline event creation: %v", err) + 
return fmt.Errorf("timeline comment creation: %v", err) } } else { if err := gi.ensureTimelineItem(repo, b, item); err != nil { - return fmt.Errorf("timeline comment creation: %v", err) + return fmt.Errorf("timeline event creation: %v", err) } } } @@ -189,7 +189,7 @@ func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.Bug if err != nil { return err } - _, _, err = b.ChangeLabelsRaw( + _, err = b.ForceChangeLabelsRaw( author, item.LabeledEvent.CreatedAt.Unix(), []string{ @@ -198,6 +198,7 @@ func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.Bug nil, map[string]string{keyGithubId: id}, ) + return err case "UnlabeledEvent": @@ -211,7 +212,7 @@ func (gi *githubImporter) ensureTimelineItem(repo *cache.RepoCache, b *cache.Bug return err } - _, _, err = b.ChangeLabelsRaw( + _, err = b.ForceChangeLabelsRaw( author, item.UnlabeledEvent.CreatedAt.Unix(), nil, diff --git a/bug/op_label_change.go b/bug/op_label_change.go index a2108941..0e7929b7 100644 --- a/bug/op_label_change.go +++ b/bug/op_label_change.go @@ -234,6 +234,33 @@ func ChangeLabels(b Interface, author identity.Interface, unixTime int64, add, r return results, labelOp, nil } +// ForceChangeLabels is a convenience function to apply the operation +// The difference with ChangeLabels is that no checks of deduplications are done. You are entirely +// responsible of what you are doing. In the general case, you want to use ChangeLabels instead. +// The intended use of this function is to allow importers to create legal but unexpected label changes, +// like removing a label with no information of when it was added before. 
+func ForceChangeLabels(b Interface, author identity.Interface, unixTime int64, add, remove []string) (*LabelChangeOperation, error) { + added := make([]Label, len(add)) + for i, str := range add { + added[i] = Label(str) + } + + removed := make([]Label, len(remove)) + for i, str := range remove { + removed[i] = Label(str) + } + + labelOp := NewLabelChangeOperation(author, unixTime, added, removed) + + if err := labelOp.Validate(); err != nil { + return nil, err + } + + b.Append(labelOp) + + return labelOp, nil +} + func labelExist(labels []Label, label Label) bool { for _, l := range labels { if l == label { diff --git a/cache/bug_cache.go b/cache/bug_cache.go index 5fc76658..aad6dab8 100644 --- a/cache/bug_cache.go +++ b/cache/bug_cache.go @@ -139,6 +139,33 @@ func (c *BugCache) ChangeLabelsRaw(author *IdentityCache, unixTime int64, added return changes, op, nil } +func (c *BugCache) ForceChangeLabels(added []string, removed []string) (*bug.LabelChangeOperation, error) { + author, err := c.repoCache.GetUserIdentity() + if err != nil { + return nil, err + } + + return c.ForceChangeLabelsRaw(author, time.Now().Unix(), added, removed, nil) +} + +func (c *BugCache) ForceChangeLabelsRaw(author *IdentityCache, unixTime int64, added []string, removed []string, metadata map[string]string) (*bug.LabelChangeOperation, error) { + op, err := bug.ForceChangeLabels(c.bug, author.Identity, unixTime, added, removed) + if err != nil { + return nil, err + } + + for key, value := range metadata { + op.SetMetadata(key, value) + } + + err = c.notifyUpdated() + if err != nil { + return nil, err + } + + return op, nil +} + func (c *BugCache) Open() (*bug.SetStatusOperation, error) { author, err := c.repoCache.GetUserIdentity() if err != nil { -- cgit From 390ca867244004af56f05c19d26a4e9aeb20ae6c Mon Sep 17 00:00:00 2001 From: Amine Hilaly Date: Sat, 4 May 2019 17:38:03 +0200 Subject: Improve naming and functions signatures expose `NewIterator` in `github` package remove `exist` in 
tests cases skip tests when env var GITHUB_TOKEN is not given --- bridge/github/github.go | 4 +- bridge/github/import.go | 34 +++++----- bridge/github/import_test.go | 148 +++++++++++++++++++---------------------- bridge/github/iterator.go | 154 +++++++++++++++++++++++-------------------- 4 files changed, 169 insertions(+), 171 deletions(-) diff --git a/bridge/github/github.go b/bridge/github/github.go index b3f8d763..5fee7487 100644 --- a/bridge/github/github.go +++ b/bridge/github/github.go @@ -27,9 +27,9 @@ func (*Github) NewExporter() core.Exporter { return nil } -func buildClient(conf core.Configuration) *githubv4.Client { +func buildClient(token string) *githubv4.Client { src := oauth2.StaticTokenSource( - &oauth2.Token{AccessToken: conf[keyToken]}, + &oauth2.Token{AccessToken: token}, ) httpClient := oauth2.NewClient(context.TODO(), src) diff --git a/bridge/github/import.go b/bridge/github/import.go index 5f99b5ea..ad9d5046 100644 --- a/bridge/github/import.go +++ b/bridge/github/import.go @@ -22,29 +22,27 @@ const ( // githubImporter implement the Importer interface type githubImporter struct { - iterator *iterator - conf core.Configuration + conf core.Configuration } func (gi *githubImporter) Init(conf core.Configuration) error { gi.conf = conf - gi.iterator = newIterator(conf) return nil } // ImportAll . 
func (gi *githubImporter) ImportAll(repo *cache.RepoCache, since time.Time) error { - gi.iterator.since = since + iterator := NewIterator(gi.conf[keyUser], gi.conf[keyProject], gi.conf[keyToken], since) // Loop over all matching issues - for gi.iterator.NextIssue() { - issue := gi.iterator.IssueValue() + for iterator.NextIssue() { + issue := iterator.IssueValue() - fmt.Printf("importing issue: %v %v\n", gi.iterator.count, issue.Title) + fmt.Printf("importing issue: %v %v\n", iterator.importedIssues, issue.Title) // get issue edits issueEdits := []userContentEdit{} - for gi.iterator.NextIssueEdit() { - if issueEdit := gi.iterator.IssueEditValue(); issueEdit.Diff != nil && string(*issueEdit.Diff) != "" { + for iterator.NextIssueEdit() { + if issueEdit := iterator.IssueEditValue(); issueEdit.Diff != nil && string(*issueEdit.Diff) != "" { issueEdits = append(issueEdits, issueEdit) } } @@ -56,15 +54,15 @@ func (gi *githubImporter) ImportAll(repo *cache.RepoCache, since time.Time) erro } // loop over timeline items - for gi.iterator.NextTimeline() { - item := gi.iterator.TimelineValue() + for iterator.NextTimeline() { + item := iterator.TimelineValue() // if item is comment if item.Typename == "IssueComment" { // collect all edits commentEdits := []userContentEdit{} - for gi.iterator.NextCommentEdit() { - if commentEdit := gi.iterator.CommentEditValue(); commentEdit.Diff != nil && string(*commentEdit.Diff) != "" { + for iterator.NextCommentEdit() { + if commentEdit := iterator.CommentEditValue(); commentEdit.Diff != nil && string(*commentEdit.Diff) != "" { commentEdits = append(commentEdits, commentEdit) } } @@ -87,12 +85,12 @@ func (gi *githubImporter) ImportAll(repo *cache.RepoCache, since time.Time) erro } } - if err := gi.iterator.Error(); err != nil { + if err := iterator.Error(); err != nil { fmt.Printf("import error: %v\n", err) return err } - fmt.Printf("Successfully imported %v issues from Github\n", gi.iterator.Count()) + fmt.Printf("Successfully imported %v 
issues from Github\n", iterator.ImportedIssues()) return nil } @@ -389,7 +387,7 @@ func (gi *githubImporter) ensureCommentEdit(repo *cache.RepoCache, b *cache.BugC switch { case edit.DeletedAt != nil: // comment deletion, not supported yet - fmt.Println("comment deletion ....") + fmt.Println("comment deletion is not supported yet") case edit.DeletedAt == nil: @@ -475,7 +473,9 @@ func (gi *githubImporter) getGhost(repo *cache.RepoCache) (*cache.IdentityCache, "login": githubv4.String("ghost"), } - err = gi.iterator.gc.Query(context.TODO(), &q, variables) + gc := buildClient(gi.conf[keyToken]) + + err = gc.Query(context.TODO(), &q, variables) if err != nil { return nil, err } diff --git a/bridge/github/import_test.go b/bridge/github/import_test.go index d64f0b4b..79af8450 100644 --- a/bridge/github/import_test.go +++ b/bridge/github/import_test.go @@ -18,15 +18,13 @@ import ( func Test_Importer(t *testing.T) { author := identity.NewIdentity("Michael Muré", "batolettre@gmail.com") tests := []struct { - name string - exist bool - url string - bug *bug.Snapshot + name string + url string + bug *bug.Snapshot }{ { - name: "simple issue", - exist: true, - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/1", + name: "simple issue", + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/1", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "simple issue", "initial comment", nil), @@ -35,9 +33,8 @@ func Test_Importer(t *testing.T) { }, }, { - name: "empty issue", - exist: true, - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/2", + name: "empty issue", + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/2", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "empty issue", "", nil), @@ -45,9 +42,8 @@ func Test_Importer(t *testing.T) { }, }, { - name: "complex issue", - exist: true, - url: 
"https://github.com/MichaelMure/git-but-test-github-bridge/issues/3", + name: "complex issue", + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/3", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "complex issue", "initial comment", nil), @@ -63,9 +59,8 @@ func Test_Importer(t *testing.T) { }, }, { - name: "editions", - exist: true, - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/4", + name: "editions", + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/4", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "editions", "initial comment edited", nil), @@ -76,9 +71,8 @@ func Test_Importer(t *testing.T) { }, }, { - name: "comment deletion", - exist: true, - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/5", + name: "comment deletion", + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/5", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "comment deletion", "", nil), @@ -86,9 +80,8 @@ func Test_Importer(t *testing.T) { }, }, { - name: "edition deletion", - exist: true, - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/6", + name: "edition deletion", + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/6", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "edition deletion", "initial comment", nil), @@ -99,9 +92,8 @@ func Test_Importer(t *testing.T) { }, }, { - name: "hidden comment", - exist: true, - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/7", + name: "hidden comment", + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/7", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "hidden comment", "initial comment", nil), @@ -110,9 +102,8 @@ func Test_Importer(t *testing.T) { }, }, { - name: "transfered issue", - exist: true, - url: 
"https://github.com/MichaelMure/git-but-test-github-bridge/issues/8", + name: "transfered issue", + url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/8", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "transfered issue", "", nil), @@ -139,11 +130,16 @@ func Test_Importer(t *testing.T) { defer backend.Close() interrupt.RegisterCleaner(backend.Close) + token := os.Getenv("GITHUB_TOKEN") + if token == "" { + t.Skip("Env var GITHUB_TOKEN missing") + } + importer := &githubImporter{} err = importer.Init(core.Configuration{ "user": "MichaelMure", "project": "git-but-test-github-bridge", - "token": os.Getenv("GITHUB_TOKEN"), + "token": token, }) if err != nil { t.Fatal(err) @@ -165,59 +161,53 @@ func Test_Importer(t *testing.T) { } ops := b.Snapshot().Operations - if tt.exist { - assert.Equal(t, len(tt.bug.Operations), len(b.Snapshot().Operations)) - - for i, op := range tt.bug.Operations { - switch op.(type) { - case *bug.CreateOperation: - if op2, ok := ops[i].(*bug.CreateOperation); ok { - assert.Equal(t, op2.Title, op.(*bug.CreateOperation).Title) - assert.Equal(t, op2.Message, op.(*bug.CreateOperation).Message) - continue - } - t.Errorf("bad operation type index = %d expected = CreationOperation", i) - case *bug.SetStatusOperation: - if op2, ok := ops[i].(*bug.SetStatusOperation); ok { - assert.Equal(t, op2.Status, op.(*bug.SetStatusOperation).Status) - continue - } - t.Errorf("bad operation type index = %d expected = SetStatusOperation", i) - case *bug.SetTitleOperation: - if op2, ok := ops[i].(*bug.SetTitleOperation); ok { - assert.Equal(t, op.(*bug.SetTitleOperation).Was, op2.Was) - assert.Equal(t, op.(*bug.SetTitleOperation).Title, op2.Title) - continue - } - t.Errorf("bad operation type index = %d expected = SetTitleOperation", i) - case *bug.LabelChangeOperation: - if op2, ok := ops[i].(*bug.LabelChangeOperation); ok { - assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Added, op2.Added) - 
assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Removed, op2.Removed) - continue - } - t.Errorf("bad operation type index = %d expected = ChangeLabelOperation", i) - case *bug.AddCommentOperation: - if op2, ok := ops[i].(*bug.AddCommentOperation); ok { - assert.Equal(t, op.(*bug.AddCommentOperation).Message, op2.Message) - continue - } - t.Errorf("bad operation type index = %d expected = AddCommentOperation", i) - case *bug.EditCommentOperation: - if op2, ok := ops[i].(*bug.EditCommentOperation); ok { - assert.Equal(t, op.(*bug.EditCommentOperation).Message, op2.Message) - continue - } - t.Errorf("bad operation type index = %d expected = EditCommentOperation", i) - default: - + assert.Equal(t, len(tt.bug.Operations), len(b.Snapshot().Operations)) + + for i, op := range tt.bug.Operations { + switch op.(type) { + case *bug.CreateOperation: + if op2, ok := ops[i].(*bug.CreateOperation); ok { + assert.Equal(t, op2.Title, op.(*bug.CreateOperation).Title) + assert.Equal(t, op2.Message, op.(*bug.CreateOperation).Message) + continue + } + t.Errorf("bad operation type index = %d expected = CreationOperation", i) + case *bug.SetStatusOperation: + if op2, ok := ops[i].(*bug.SetStatusOperation); ok { + assert.Equal(t, op2.Status, op.(*bug.SetStatusOperation).Status) + continue + } + t.Errorf("bad operation type index = %d expected = SetStatusOperation", i) + case *bug.SetTitleOperation: + if op2, ok := ops[i].(*bug.SetTitleOperation); ok { + assert.Equal(t, op.(*bug.SetTitleOperation).Was, op2.Was) + assert.Equal(t, op.(*bug.SetTitleOperation).Title, op2.Title) + continue } + t.Errorf("bad operation type index = %d expected = SetTitleOperation", i) + case *bug.LabelChangeOperation: + if op2, ok := ops[i].(*bug.LabelChangeOperation); ok { + assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Added, op2.Added) + assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Removed, op2.Removed) + continue + } + t.Errorf("bad operation type index = %d expected = 
ChangeLabelOperation", i) + case *bug.AddCommentOperation: + if op2, ok := ops[i].(*bug.AddCommentOperation); ok { + assert.Equal(t, op.(*bug.AddCommentOperation).Message, op2.Message) + continue + } + t.Errorf("bad operation type index = %d expected = AddCommentOperation", i) + case *bug.EditCommentOperation: + if op2, ok := ops[i].(*bug.EditCommentOperation); ok { + assert.Equal(t, op.(*bug.EditCommentOperation).Message, op2.Message) + continue + } + t.Errorf("bad operation type index = %d expected = EditCommentOperation", i) + default: + panic("Unknown operation type") } - - } else { - assert.Equal(t, b, nil) } }) } - } diff --git a/bridge/github/iterator.go b/bridge/github/iterator.go index 281f8a6b..239b49bd 100644 --- a/bridge/github/iterator.go +++ b/bridge/github/iterator.go @@ -4,7 +4,6 @@ import ( "context" "time" - "github.com/MichaelMure/git-bug/bridge/core" "github.com/shurcooL/githubv4" ) @@ -50,8 +49,8 @@ type iterator struct { // sticky error err error - // count to keep track of the number of imported issues - count int + // number of imported issues + importedIssues int // timeline iterator timeline timelineIterator @@ -63,32 +62,32 @@ type iterator struct { commentEdit commentEditIterator } -func newIterator(conf core.Configuration) *iterator { +func NewIterator(user, project, token string, since time.Time) *iterator { return &iterator{ - gc: buildClient(conf), + gc: buildClient(token), + since: since, capacity: 10, - count: 0, timeline: timelineIterator{ index: -1, issueEdit: indexer{-1}, commentEdit: indexer{-1}, variables: map[string]interface{}{ - "owner": githubv4.String(conf["user"]), - "name": githubv4.String(conf["project"]), + "owner": githubv4.String(user), + "name": githubv4.String(project), }, }, commentEdit: commentEditIterator{ index: -1, variables: map[string]interface{}{ - "owner": githubv4.String(conf["user"]), - "name": githubv4.String(conf["project"]), + "owner": githubv4.String(user), + "name": githubv4.String(project), }, }, 
issueEdit: issueEditIterator{ index: -1, variables: map[string]interface{}{ - "owner": githubv4.String(conf["user"]), - "name": githubv4.String(conf["project"]), + "owner": githubv4.String(user), + "name": githubv4.String(project), }, }, } @@ -130,10 +129,11 @@ func (i *iterator) initCommentEditQueryVariables() { // reverse UserContentEdits arrays in both of the issue and // comment timelines func (i *iterator) reverseTimelineEditNodes() { - reverseEdits(i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) - for index, ce := range i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges { - if ce.Node.Typename == "IssueComment" && len(i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges) != 0 { - reverseEdits(i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[index].Node.IssueComment.UserContentEdits.Nodes) + node := i.timeline.query.Repository.Issues.Nodes[0] + reverseEdits(node.UserContentEdits.Nodes) + for index, ce := range node.Timeline.Edges { + if ce.Node.Typename == "IssueComment" && len(node.Timeline.Edges) != 0 { + reverseEdits(node.Timeline.Edges[index].Node.IssueComment.UserContentEdits.Nodes) } } } @@ -143,19 +143,34 @@ func (i *iterator) Error() error { return i.err } -// Count return number of issues we iterated over -func (i *iterator) Count() int { - return i.count +// ImportedIssues return the number of issues we iterated over +func (i *iterator) ImportedIssues() int { + return i.importedIssues +} + +func (i *iterator) queryIssue() bool { + if err := i.gc.Query(context.TODO(), &i.timeline.query, i.timeline.variables); err != nil { + i.err = err + return false + } + + if len(i.timeline.query.Repository.Issues.Nodes) == 0 { + return false + } + + i.reverseTimelineEditNodes() + i.importedIssues++ + return true } // Next issue func (i *iterator) NextIssue() bool { // we make the first move - if i.count == 0 { + if i.importedIssues == 0 { // init variables and goto queryIssue block i.initTimelineQueryVariables() - goto 
queryIssue + return i.queryIssue() } if i.err != nil { @@ -175,19 +190,7 @@ func (i *iterator) NextIssue() bool { i.timeline.lastEndCursor = i.timeline.query.Repository.Issues.Nodes[0].Timeline.PageInfo.EndCursor // query issue block -queryIssue: - if err := i.gc.Query(context.TODO(), &i.timeline.query, i.timeline.variables); err != nil { - i.err = err - return false - } - - if len(i.timeline.query.Repository.Issues.Nodes) == 0 { - return false - } - - i.reverseTimelineEditNodes() - i.count++ - return true + return i.queryIssue() } func (i *iterator) IssueValue() issueTimeline { @@ -230,6 +233,27 @@ func (i *iterator) TimelineValue() timelineItem { return i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node } +func (i *iterator) queryIssueEdit() bool { + if err := i.gc.Query(context.TODO(), &i.issueEdit.query, i.issueEdit.variables); err != nil { + i.err = err + //i.timeline.issueEdit.index = -1 + return false + } + + // reverse issue edits because github + reverseEdits(i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) + + // this is not supposed to happen + if len(i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) == 0 { + i.timeline.issueEdit.index = -1 + return false + } + + i.issueEdit.index = 0 + i.timeline.issueEdit.index = -2 + return true +} + func (i *iterator) NextIssueEdit() bool { if i.err != nil { return false @@ -251,7 +275,7 @@ func (i *iterator) NextIssueEdit() bool { // if there is more edits, query them i.issueEdit.variables["issueEditBefore"] = i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.PageInfo.StartCursor - goto queryIssueEdit + return i.queryIssueEdit() } // if there is no edits @@ -273,26 +297,7 @@ func (i *iterator) NextIssueEdit() bool { // if there is more edits, query them i.initIssueEditQueryVariables() i.issueEdit.variables["issueEditBefore"] = i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.PageInfo.StartCursor - -queryIssueEdit: - if err 
:= i.gc.Query(context.TODO(), &i.issueEdit.query, i.issueEdit.variables); err != nil { - i.err = err - //i.timeline.issueEdit.index = -1 - return false - } - - // reverse issue edits because github - reverseEdits(i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) - - // this is not supposed to happen - if len(i.issueEdit.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) == 0 { - i.timeline.issueEdit.index = -1 - return false - } - - i.issueEdit.index = 0 - i.timeline.issueEdit.index = -2 - return true + return i.queryIssueEdit() } func (i *iterator) IssueEditValue() userContentEdit { @@ -305,6 +310,25 @@ func (i *iterator) IssueEditValue() userContentEdit { return i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes[i.timeline.issueEdit.index] } +func (i *iterator) queryCommentEdit() bool { + if err := i.gc.Query(context.TODO(), &i.commentEdit.query, i.commentEdit.variables); err != nil { + i.err = err + return false + } + + // this is not supposed to happen + if len(i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.Nodes) == 0 { + i.timeline.commentEdit.index = -1 + return false + } + + reverseEdits(i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.Nodes) + + i.commentEdit.index = 0 + i.timeline.commentEdit.index = -2 + return true +} + func (i *iterator) NextCommentEdit() bool { if i.err != nil { return false @@ -326,7 +350,7 @@ func (i *iterator) NextCommentEdit() bool { // if there is more comment edits, query them i.commentEdit.variables["commentEditBefore"] = i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.PageInfo.StartCursor - goto queryCommentEdit + return i.queryCommentEdit() } // if there is no comment edits @@ -354,23 +378,7 @@ func (i *iterator) NextCommentEdit() bool { i.commentEdit.variables["commentEditBefore"] = 
i.timeline.query.Repository.Issues.Nodes[0].Timeline.Edges[i.timeline.index].Node.IssueComment.UserContentEdits.PageInfo.StartCursor -queryCommentEdit: - if err := i.gc.Query(context.TODO(), &i.commentEdit.query, i.commentEdit.variables); err != nil { - i.err = err - return false - } - - // this is not supposed to happen - if len(i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.Nodes) == 0 { - i.timeline.commentEdit.index = -1 - return false - } - - reverseEdits(i.commentEdit.query.Repository.Issues.Nodes[0].Timeline.Nodes[0].IssueComment.UserContentEdits.Nodes) - - i.commentEdit.index = 0 - i.timeline.commentEdit.index = -2 - return true + return i.queryCommentEdit() } func (i *iterator) CommentEditValue() userContentEdit { -- cgit From eec17050f1fbc8565dcd117329b24c201ac476b1 Mon Sep 17 00:00:00 2001 From: Michael Muré Date: Sat, 4 May 2019 21:45:18 +0200 Subject: github: simplify and improve the import test --- bridge/github/import_test.go | 83 +++++++++++++------------------------------- 1 file changed, 25 insertions(+), 58 deletions(-) diff --git a/bridge/github/import_test.go b/bridge/github/import_test.go index 79af8450..cd67c99b 100644 --- a/bridge/github/import_test.go +++ b/bridge/github/import_test.go @@ -1,18 +1,20 @@ package github import ( + "fmt" "os" "testing" "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/MichaelMure/git-bug/bridge/core" "github.com/MichaelMure/git-bug/bug" "github.com/MichaelMure/git-bug/cache" "github.com/MichaelMure/git-bug/identity" - "github.com/MichaelMure/git-bug/repository" "github.com/MichaelMure/git-bug/util/interrupt" + "github.com/MichaelMure/git-bug/util/test" ) func Test_Importer(t *testing.T) { @@ -112,20 +114,10 @@ func Test_Importer(t *testing.T) { }, } - cwd, err := os.Getwd() - if err != nil { - t.Fatal(err) - } - - repo, err := repository.NewGitRepo(cwd, bug.Witnesser) - if err != nil { - t.Fatal(err) - } + repo := 
test.CreateRepo(false) backend, err := cache.NewRepoCache(repo) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) defer backend.Close() interrupt.RegisterCleaner(backend.Close) @@ -141,69 +133,44 @@ func Test_Importer(t *testing.T) { "project": "git-but-test-github-bridge", "token": token, }) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) + + start := time.Now() err = importer.ImportAll(backend, time.Time{}) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) + + fmt.Printf("test repository imported in %f seconds\n", time.Since(start).Seconds()) - ids := backend.AllBugsIds() - assert.Equal(t, len(ids), 8) + require.Len(t, backend.AllBugsIds(), 8) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b, err := backend.ResolveBugCreateMetadata(keyGithubUrl, tt.url) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err) ops := b.Snapshot().Operations - assert.Equal(t, len(tt.bug.Operations), len(b.Snapshot().Operations)) + assert.Len(t, tt.bug.Operations, len(b.Snapshot().Operations)) for i, op := range tt.bug.Operations { + assert.IsType(t, ops[i], op) + switch op.(type) { case *bug.CreateOperation: - if op2, ok := ops[i].(*bug.CreateOperation); ok { - assert.Equal(t, op2.Title, op.(*bug.CreateOperation).Title) - assert.Equal(t, op2.Message, op.(*bug.CreateOperation).Message) - continue - } - t.Errorf("bad operation type index = %d expected = CreationOperation", i) + assert.Equal(t, ops[i].(*bug.CreateOperation).Title, op.(*bug.CreateOperation).Title) + assert.Equal(t, ops[i].(*bug.CreateOperation).Message, op.(*bug.CreateOperation).Message) case *bug.SetStatusOperation: - if op2, ok := ops[i].(*bug.SetStatusOperation); ok { - assert.Equal(t, op2.Status, op.(*bug.SetStatusOperation).Status) - continue - } - t.Errorf("bad operation type index = %d expected = SetStatusOperation", i) + assert.Equal(t, ops[i].(*bug.SetStatusOperation).Status, op.(*bug.SetStatusOperation).Status) case *bug.SetTitleOperation: - if 
op2, ok := ops[i].(*bug.SetTitleOperation); ok { - assert.Equal(t, op.(*bug.SetTitleOperation).Was, op2.Was) - assert.Equal(t, op.(*bug.SetTitleOperation).Title, op2.Title) - continue - } - t.Errorf("bad operation type index = %d expected = SetTitleOperation", i) + assert.Equal(t, ops[i].(*bug.SetTitleOperation).Was, op.(*bug.SetTitleOperation).Was) + assert.Equal(t, ops[i].(*bug.SetTitleOperation).Title, op.(*bug.SetTitleOperation).Title) case *bug.LabelChangeOperation: - if op2, ok := ops[i].(*bug.LabelChangeOperation); ok { - assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Added, op2.Added) - assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Removed, op2.Removed) - continue - } - t.Errorf("bad operation type index = %d expected = ChangeLabelOperation", i) + assert.ElementsMatch(t, ops[i].(*bug.LabelChangeOperation).Added, op.(*bug.LabelChangeOperation).Added) + assert.ElementsMatch(t, ops[i].(*bug.LabelChangeOperation).Removed, op.(*bug.LabelChangeOperation).Removed) case *bug.AddCommentOperation: - if op2, ok := ops[i].(*bug.AddCommentOperation); ok { - assert.Equal(t, op.(*bug.AddCommentOperation).Message, op2.Message) - continue - } - t.Errorf("bad operation type index = %d expected = AddCommentOperation", i) + assert.Equal(t, ops[i].(*bug.AddCommentOperation).Message, op.(*bug.AddCommentOperation).Message) case *bug.EditCommentOperation: - if op2, ok := ops[i].(*bug.EditCommentOperation); ok { - assert.Equal(t, op.(*bug.EditCommentOperation).Message, op2.Message) - continue - } - t.Errorf("bad operation type index = %d expected = EditCommentOperation", i) + assert.Equal(t, ops[i].(*bug.EditCommentOperation).Message, op.(*bug.EditCommentOperation).Message) default: panic("Unknown operation type") } -- cgit From 7d0296337287ed3d5a97f15c64d51d24340f2567 Mon Sep 17 00:00:00 2001 From: Amine Hilaly Date: Sat, 4 May 2019 22:26:54 +0200 Subject: Add old importer comments in the iterator Test operation authors Fix typo in test repo url --- 
bridge/github/import.go | 4 ++++ bridge/github/import_test.go | 27 +++++++++++++++++---------- bridge/github/iterator.go | 13 ++++++++++++- 3 files changed, 33 insertions(+), 11 deletions(-) diff --git a/bridge/github/import.go b/bridge/github/import.go index ad9d5046..e72a2a45 100644 --- a/bridge/github/import.go +++ b/bridge/github/import.go @@ -42,6 +42,8 @@ func (gi *githubImporter) ImportAll(repo *cache.RepoCache, since time.Time) erro // get issue edits issueEdits := []userContentEdit{} for iterator.NextIssueEdit() { + // issueEdit.Diff == nil happen if the event is older than early 2018, Github doesn't have the data before that. + // Best we can do is to ignore the event. if issueEdit := iterator.IssueEditValue(); issueEdit.Diff != nil && string(*issueEdit.Diff) != "" { issueEdits = append(issueEdits, issueEdit) } @@ -130,6 +132,7 @@ func (gi *githubImporter) ensureIssue(repo *cache.RepoCache, issue issueTimeline // create bug from given issueEdits for i, edit := range issueEdits { if i == 0 && b != nil { + // The first edit in the github result is the issue creation itself, we already have that continue } @@ -323,6 +326,7 @@ func (gi *githubImporter) ensureTimelineComment(repo *cache.RepoCache, b *cache. 
} else { for i, edit := range item.UserContentEdits.Nodes { if i == 0 && target != "" { + // The first edit in the github result is the comment creation itself, we already have that continue } diff --git a/bridge/github/import_test.go b/bridge/github/import_test.go index cd67c99b..967f50ee 100644 --- a/bridge/github/import_test.go +++ b/bridge/github/import_test.go @@ -26,7 +26,7 @@ func Test_Importer(t *testing.T) { }{ { name: "simple issue", - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/1", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/1", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "simple issue", "initial comment", nil), @@ -36,7 +36,7 @@ func Test_Importer(t *testing.T) { }, { name: "empty issue", - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/2", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/2", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "empty issue", "", nil), @@ -45,7 +45,7 @@ func Test_Importer(t *testing.T) { }, { name: "complex issue", - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/3", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/3", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "complex issue", "initial comment", nil), @@ -62,7 +62,7 @@ func Test_Importer(t *testing.T) { }, { name: "editions", - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/4", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/4", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "editions", "initial comment edited", nil), @@ -74,7 +74,7 @@ func Test_Importer(t *testing.T) { }, { name: "comment deletion", - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/5", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/5", bug: 
&bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "comment deletion", "", nil), @@ -83,7 +83,7 @@ func Test_Importer(t *testing.T) { }, { name: "edition deletion", - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/6", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/6", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "edition deletion", "initial comment", nil), @@ -95,7 +95,7 @@ func Test_Importer(t *testing.T) { }, { name: "hidden comment", - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/7", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/7", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "hidden comment", "initial comment", nil), @@ -105,7 +105,7 @@ func Test_Importer(t *testing.T) { }, { name: "transfered issue", - url: "https://github.com/MichaelMure/git-but-test-github-bridge/issues/8", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/8", bug: &bug.Snapshot{ Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "transfered issue", "", nil), @@ -130,7 +130,7 @@ func Test_Importer(t *testing.T) { importer := &githubImporter{} err = importer.Init(core.Configuration{ "user": "MichaelMure", - "project": "git-but-test-github-bridge", + "project": "git-bug-test-github-bridge", "token": token, }) require.NoError(t, err) @@ -153,24 +153,31 @@ func Test_Importer(t *testing.T) { assert.Len(t, tt.bug.Operations, len(b.Snapshot().Operations)) for i, op := range tt.bug.Operations { - assert.IsType(t, ops[i], op) + require.IsType(t, ops[i], op) switch op.(type) { case *bug.CreateOperation: assert.Equal(t, ops[i].(*bug.CreateOperation).Title, op.(*bug.CreateOperation).Title) assert.Equal(t, ops[i].(*bug.CreateOperation).Message, op.(*bug.CreateOperation).Message) + assert.Equal(t, ops[i].(*bug.CreateOperation).Author.Name(), op.(*bug.CreateOperation).Author.Name()) case 
*bug.SetStatusOperation: assert.Equal(t, ops[i].(*bug.SetStatusOperation).Status, op.(*bug.SetStatusOperation).Status) + assert.Equal(t, ops[i].(*bug.SetStatusOperation).Author.Name(), op.(*bug.SetStatusOperation).Author.Name()) case *bug.SetTitleOperation: assert.Equal(t, ops[i].(*bug.SetTitleOperation).Was, op.(*bug.SetTitleOperation).Was) assert.Equal(t, ops[i].(*bug.SetTitleOperation).Title, op.(*bug.SetTitleOperation).Title) + assert.Equal(t, ops[i].(*bug.SetTitleOperation).Author.Name(), op.(*bug.SetTitleOperation).Author.Name()) case *bug.LabelChangeOperation: assert.ElementsMatch(t, ops[i].(*bug.LabelChangeOperation).Added, op.(*bug.LabelChangeOperation).Added) assert.ElementsMatch(t, ops[i].(*bug.LabelChangeOperation).Removed, op.(*bug.LabelChangeOperation).Removed) + assert.Equal(t, ops[i].(*bug.LabelChangeOperation).Author.Name(), op.(*bug.LabelChangeOperation).Author.Name()) case *bug.AddCommentOperation: assert.Equal(t, ops[i].(*bug.AddCommentOperation).Message, op.(*bug.AddCommentOperation).Message) + assert.Equal(t, ops[i].(*bug.AddCommentOperation).Author.Name(), op.(*bug.AddCommentOperation).Author.Name()) case *bug.EditCommentOperation: assert.Equal(t, ops[i].(*bug.EditCommentOperation).Message, op.(*bug.EditCommentOperation).Message) + assert.Equal(t, ops[i].(*bug.EditCommentOperation).Author.Name(), op.(*bug.EditCommentOperation).Author.Name()) + default: panic("Unknown operation type") } diff --git a/bridge/github/iterator.go b/bridge/github/iterator.go index 239b49bd..48e98f17 100644 --- a/bridge/github/iterator.go +++ b/bridge/github/iterator.go @@ -100,6 +100,8 @@ func (i *iterator) initTimelineQueryVariables() { i.timeline.variables["issueSince"] = githubv4.DateTime{Time: i.since} i.timeline.variables["timelineFirst"] = githubv4.Int(i.capacity) i.timeline.variables["timelineAfter"] = (*githubv4.String)(nil) + // Fun fact, github provide the comment edition in reverse chronological + // order, because haha. Look at me, I'm dying of laughter. 
i.timeline.variables["issueEditLast"] = githubv4.Int(i.capacity) i.timeline.variables["issueEditBefore"] = (*githubv4.String)(nil) i.timeline.variables["commentEditLast"] = githubv4.Int(i.capacity) @@ -278,7 +280,16 @@ func (i *iterator) NextIssueEdit() bool { return i.queryIssueEdit() } - // if there is no edits + // if there is no edit, the UserContentEdits given by github is empty. That + // means that the original message is given by the issue message. + // + // if there is edits, the UserContentEdits given by github contains both the + // original message and the following edits. The issue message give the last + // version so we don't care about that. + // + // the tricky part: for an issue older than the UserContentEdits API, github + // doesn't have the previous message version anymore and give an edition + // with .Diff == nil. We have to filter them. if len(i.timeline.query.Repository.Issues.Nodes[0].UserContentEdits.Nodes) == 0 { return false } -- cgit From 537eddb97843a3f520fdedcd35f77b08880a4829 Mon Sep 17 00:00:00 2001 From: Amine Hilaly Date: Sun, 5 May 2019 14:08:48 +0200 Subject: Fix import bug --- bridge/github/import.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bridge/github/import.go b/bridge/github/import.go index e72a2a45..9b9b790e 100644 --- a/bridge/github/import.go +++ b/bridge/github/import.go @@ -324,7 +324,7 @@ func (gi *githubImporter) ensureTimelineComment(repo *cache.RepoCache, b *cache. 
} } } else { - for i, edit := range item.UserContentEdits.Nodes { + for i, edit := range edits { if i == 0 && target != "" { // The first edit in the github result is the comment creation itself, we already have that continue -- cgit From 2e17f371758ad25a3674d65ef0e8e32a4660e6d4 Mon Sep 17 00:00:00 2001 From: Amine Hilaly Date: Sun, 5 May 2019 17:48:49 +0200 Subject: Add unicode control characters test case Move `cleanupText` to utils/text/transform.go `text.Cleanup`: removing unicode control characters except for those allowed by `text.Safe` Add golang.org/x/text dependencies fix text.Cleanup Fix import panic --- Gopkg.lock | 13 + bridge/github/import.go | 46 +- bridge/github/import_test.go | 44 +- util/text/transform.go | 31 ++ vendor/golang.org/x/text/AUTHORS | 3 + vendor/golang.org/x/text/CONTRIBUTORS | 3 + vendor/golang.org/x/text/LICENSE | 27 + vendor/golang.org/x/text/PATENTS | 22 + vendor/golang.org/x/text/runes/cond.go | 187 +++++++ vendor/golang.org/x/text/runes/runes.go | 355 ++++++++++++ vendor/golang.org/x/text/transform/transform.go | 705 ++++++++++++++++++++++++ 11 files changed, 1404 insertions(+), 32 deletions(-) create mode 100644 util/text/transform.go create mode 100644 vendor/golang.org/x/text/AUTHORS create mode 100644 vendor/golang.org/x/text/CONTRIBUTORS create mode 100644 vendor/golang.org/x/text/LICENSE create mode 100644 vendor/golang.org/x/text/PATENTS create mode 100644 vendor/golang.org/x/text/runes/cond.go create mode 100644 vendor/golang.org/x/text/runes/runes.go create mode 100644 vendor/golang.org/x/text/transform/transform.go diff --git a/Gopkg.lock b/Gopkg.lock index be0614fe..52cb036c 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -386,6 +386,17 @@ pruneopts = "UT" revision = "ac767d655b305d4e9612f5f6e33120b9176c4ad4" +[[projects]] + digest = "1:86cb348528a842f96e651ca3f8197070e9ebc315f8c73e71d0df7a60e92a6db1" + name = "golang.org/x/text" + packages = [ + "runes", + "transform", + ] + pruneopts = "UT" + revision = 
"342b2e1fbaa52c93f31447ad2c6abc048c63e475" + version = "v0.3.2" + [[projects]] digest = "1:71850ac10bbeb4d8dd06ce0743fe57654daf28510b0f6cbd9692aaf0d269360e" name = "golang.org/x/tools" @@ -460,6 +471,8 @@ "github.com/vektah/gqlparser/ast", "golang.org/x/crypto/ssh/terminal", "golang.org/x/oauth2", + "golang.org/x/text/runes", + "golang.org/x/text/transform", ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/bridge/github/import.go b/bridge/github/import.go index 9b9b790e..0c5468d8 100644 --- a/bridge/github/import.go +++ b/bridge/github/import.go @@ -3,7 +3,6 @@ package github import ( "context" "fmt" - "strings" "time" "github.com/MichaelMure/git-bug/bridge/core" @@ -11,6 +10,7 @@ import ( "github.com/MichaelMure/git-bug/cache" "github.com/MichaelMure/git-bug/identity" "github.com/MichaelMure/git-bug/util/git" + "github.com/MichaelMure/git-bug/util/text" "github.com/shurcooL/githubv4" ) @@ -112,12 +112,17 @@ func (gi *githubImporter) ensureIssue(repo *cache.RepoCache, issue issueTimeline // if issueEdits is empty if len(issueEdits) == 0 { if err == bug.ErrBugNotExist { + cleanText, err := text.Cleanup(string(issue.Body)) + if err != nil { + return nil, err + } + // create bug b, err = repo.NewBugRaw( author, issue.CreatedAt.Unix(), issue.Title, - cleanupText(string(issue.Body)), + cleanText, nil, map[string]string{ keyGithubId: parseId(issue.Id), @@ -136,6 +141,11 @@ func (gi *githubImporter) ensureIssue(repo *cache.RepoCache, issue issueTimeline continue } + cleanText, err := text.Cleanup(string(*edit.Diff)) + if err != nil { + return nil, err + } + // if the bug doesn't exist if b == nil { // we create the bug as soon as we have a legit first edition @@ -143,7 +153,7 @@ func (gi *githubImporter) ensureIssue(repo *cache.RepoCache, issue issueTimeline author, issue.CreatedAt.Unix(), issue.Title, - cleanupText(string(*edit.Diff)), + cleanText, nil, map[string]string{ keyGithubId: parseId(issue.Id), @@ -301,12 +311,16 @@ func (gi *githubImporter) 
ensureTimelineComment(repo *cache.RepoCache, b *cache. // if comment doesn't exist if err == cache.ErrNoMatchingOp { + cleanText, err := text.Cleanup(string(item.Body)) + if err != nil { + return err + } // add comment operation op, err := b.AddCommentRaw( author, item.CreatedAt.Unix(), - cleanupText(string(item.Body)), + cleanText, nil, map[string]string{ keyGithubId: parseId(item.Id), @@ -338,10 +352,15 @@ func (gi *githubImporter) ensureTimelineComment(repo *cache.RepoCache, b *cache. // create comment when target is empty if target == "" { + cleanText, err := text.Cleanup(string(*edit.Diff)) + if err != nil { + return err + } + op, err := b.AddCommentRaw( editor, edit.CreatedAt.Unix(), - cleanupText(string(*edit.Diff)), + cleanText, nil, map[string]string{ keyGithubId: parseId(item.Id), @@ -395,12 +414,17 @@ func (gi *githubImporter) ensureCommentEdit(repo *cache.RepoCache, b *cache.BugC case edit.DeletedAt == nil: + cleanText, err := text.Cleanup(string(*edit.Diff)) + if err != nil { + return err + } + // comment edition - _, err := b.EditCommentRaw( + _, err = b.EditCommentRaw( editor, edit.CreatedAt.Unix(), target, - cleanupText(string(*edit.Diff)), + cleanText, map[string]string{ keyGithubId: parseId(edit.Id), }, @@ -505,14 +529,6 @@ func parseId(id githubv4.ID) string { return fmt.Sprintf("%v", id) } -func cleanupText(text string) string { - // windows new line, Github, really ? 
- text = strings.Replace(text, "\r\n", "\n", -1) - - // trim extra new line not displayed in the github UI but still present in the data - return strings.TrimSpace(text) -} - func reverseEdits(edits []userContentEdit) []userContentEdit { for i, j := 0, len(edits)-1; i < j; i, j = i+1, j-1 { edits[i], edits[j] = edits[j], edits[i] diff --git a/bridge/github/import_test.go b/bridge/github/import_test.go index 967f50ee..48283b7a 100644 --- a/bridge/github/import_test.go +++ b/bridge/github/import_test.go @@ -31,7 +31,8 @@ func Test_Importer(t *testing.T) { Operations: []bug.Operation{ bug.NewCreateOp(author, 0, "simple issue", "initial comment", nil), bug.NewAddCommentOp(author, 0, "first comment", nil), - bug.NewAddCommentOp(author, 0, "second comment", nil)}, + bug.NewAddCommentOp(author, 0, "second comment", nil), + }, }, }, { @@ -112,6 +113,15 @@ func Test_Importer(t *testing.T) { }, }, }, + { + name: "unicode control characters", + url: "https://github.com/MichaelMure/git-bug-test-github-bridge/issues/10", + bug: &bug.Snapshot{ + Operations: []bug.Operation{ + bug.NewCreateOp(author, 0, "unicode control characters", "u0000: \nu0001: \nu0002: \nu0003: \nu0004: \nu0005: \nu0006: \nu0007: \nu0008: \nu0009: \t\nu0010: \nu0011: \nu0012: \nu0013: \nu0014: \nu0015: \nu0016: \nu0017: \nu0018: \nu0019:", nil), + }, + }, + }, } repo := test.CreateRepo(false) @@ -142,7 +152,7 @@ func Test_Importer(t *testing.T) { fmt.Printf("test repository imported in %f seconds\n", time.Since(start).Seconds()) - require.Len(t, backend.AllBugsIds(), 8) + require.Len(t, backend.AllBugsIds(), 9) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -157,26 +167,26 @@ func Test_Importer(t *testing.T) { switch op.(type) { case *bug.CreateOperation: - assert.Equal(t, ops[i].(*bug.CreateOperation).Title, op.(*bug.CreateOperation).Title) - assert.Equal(t, ops[i].(*bug.CreateOperation).Message, op.(*bug.CreateOperation).Message) - assert.Equal(t, 
ops[i].(*bug.CreateOperation).Author.Name(), op.(*bug.CreateOperation).Author.Name()) + assert.Equal(t, op.(*bug.CreateOperation).Title, ops[i].(*bug.CreateOperation).Title) + assert.Equal(t, op.(*bug.CreateOperation).Message, ops[i].(*bug.CreateOperation).Message) + assert.Equal(t, op.(*bug.CreateOperation).Author.Name(), ops[i].(*bug.CreateOperation).Author.Name()) case *bug.SetStatusOperation: - assert.Equal(t, ops[i].(*bug.SetStatusOperation).Status, op.(*bug.SetStatusOperation).Status) - assert.Equal(t, ops[i].(*bug.SetStatusOperation).Author.Name(), op.(*bug.SetStatusOperation).Author.Name()) + assert.Equal(t, op.(*bug.SetStatusOperation).Status, ops[i].(*bug.SetStatusOperation).Status) + assert.Equal(t, op.(*bug.SetStatusOperation).Author.Name(), ops[i].(*bug.SetStatusOperation).Author.Name()) case *bug.SetTitleOperation: - assert.Equal(t, ops[i].(*bug.SetTitleOperation).Was, op.(*bug.SetTitleOperation).Was) - assert.Equal(t, ops[i].(*bug.SetTitleOperation).Title, op.(*bug.SetTitleOperation).Title) - assert.Equal(t, ops[i].(*bug.SetTitleOperation).Author.Name(), op.(*bug.SetTitleOperation).Author.Name()) + assert.Equal(t, op.(*bug.SetTitleOperation).Was, ops[i].(*bug.SetTitleOperation).Was) + assert.Equal(t, op.(*bug.SetTitleOperation).Title, ops[i].(*bug.SetTitleOperation).Title) + assert.Equal(t, op.(*bug.SetTitleOperation).Author.Name(), ops[i].(*bug.SetTitleOperation).Author.Name()) case *bug.LabelChangeOperation: - assert.ElementsMatch(t, ops[i].(*bug.LabelChangeOperation).Added, op.(*bug.LabelChangeOperation).Added) - assert.ElementsMatch(t, ops[i].(*bug.LabelChangeOperation).Removed, op.(*bug.LabelChangeOperation).Removed) - assert.Equal(t, ops[i].(*bug.LabelChangeOperation).Author.Name(), op.(*bug.LabelChangeOperation).Author.Name()) + assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Added, ops[i].(*bug.LabelChangeOperation).Added) + assert.ElementsMatch(t, op.(*bug.LabelChangeOperation).Removed, ops[i].(*bug.LabelChangeOperation).Removed) + 
assert.Equal(t, op.(*bug.LabelChangeOperation).Author.Name(), ops[i].(*bug.LabelChangeOperation).Author.Name()) case *bug.AddCommentOperation: - assert.Equal(t, ops[i].(*bug.AddCommentOperation).Message, op.(*bug.AddCommentOperation).Message) - assert.Equal(t, ops[i].(*bug.AddCommentOperation).Author.Name(), op.(*bug.AddCommentOperation).Author.Name()) + assert.Equal(t, op.(*bug.AddCommentOperation).Message, ops[i].(*bug.AddCommentOperation).Message) + assert.Equal(t, op.(*bug.AddCommentOperation).Author.Name(), ops[i].(*bug.AddCommentOperation).Author.Name()) case *bug.EditCommentOperation: - assert.Equal(t, ops[i].(*bug.EditCommentOperation).Message, op.(*bug.EditCommentOperation).Message) - assert.Equal(t, ops[i].(*bug.EditCommentOperation).Author.Name(), op.(*bug.EditCommentOperation).Author.Name()) + assert.Equal(t, op.(*bug.EditCommentOperation).Message, ops[i].(*bug.EditCommentOperation).Message) + assert.Equal(t, op.(*bug.EditCommentOperation).Author.Name(), ops[i].(*bug.EditCommentOperation).Author.Name()) default: panic("Unknown operation type") diff --git a/util/text/transform.go b/util/text/transform.go new file mode 100644 index 00000000..59dc4e03 --- /dev/null +++ b/util/text/transform.go @@ -0,0 +1,31 @@ +package text + +import ( + "strings" + "unicode" + + "golang.org/x/text/runes" + "golang.org/x/text/transform" +) + +func Cleanup(text string) (string, error) { + // windows new line, Github, really ? 
+ text = strings.Replace(text, "\r\n", "\n", -1) + + // remove all unicode control characters except + // '\n', '\r' and '\t' + t := runes.Remove(runes.Predicate(func(r rune) bool { + switch r { + case '\r', '\n', '\t': + return false + } + return unicode.IsControl(r) + })) + sanitized, _, err := transform.String(t, text) + if err != nil { + return "", err + } + + // trim extra new line not displayed in the github UI but still present in the data + return strings.TrimSpace(sanitized), nil +} diff --git a/vendor/golang.org/x/text/AUTHORS b/vendor/golang.org/x/text/AUTHORS new file mode 100644 index 00000000..15167cd7 --- /dev/null +++ b/vendor/golang.org/x/text/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/text/CONTRIBUTORS b/vendor/golang.org/x/text/CONTRIBUTORS new file mode 100644 index 00000000..1c4577e9 --- /dev/null +++ b/vendor/golang.org/x/text/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/text/LICENSE b/vendor/golang.org/x/text/LICENSE new file mode 100644 index 00000000..6a66aea5 --- /dev/null +++ b/vendor/golang.org/x/text/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. 
+ * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/text/PATENTS b/vendor/golang.org/x/text/PATENTS new file mode 100644 index 00000000..73309904 --- /dev/null +++ b/vendor/golang.org/x/text/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. 
If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/text/runes/cond.go b/vendor/golang.org/x/text/runes/cond.go new file mode 100644 index 00000000..df7aa02d --- /dev/null +++ b/vendor/golang.org/x/text/runes/cond.go @@ -0,0 +1,187 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package runes + +import ( + "unicode/utf8" + + "golang.org/x/text/transform" +) + +// Note: below we pass invalid UTF-8 to the tIn and tNotIn transformers as is. +// This is done for various reasons: +// - To retain the semantics of the Nop transformer: if input is passed to a Nop +// one would expect it to be unchanged. +// - It would be very expensive to pass a converted RuneError to a transformer: +// a transformer might need more source bytes after RuneError, meaning that +// the only way to pass it safely is to create a new buffer and manage the +// intermingling of RuneErrors and normal input. +// - Many transformers leave ill-formed UTF-8 as is, so this is not +// inconsistent. Generally ill-formed UTF-8 is only replaced if it is a +// logical consequence of the operation (as for Map) or if it otherwise would +// pose security concerns (as for Remove). +// - An alternative would be to return an error on ill-formed UTF-8, but this +// would be inconsistent with other operations. 
+ +// If returns a transformer that applies tIn to consecutive runes for which +// s.Contains(r) and tNotIn to consecutive runes for which !s.Contains(r). Reset +// is called on tIn and tNotIn at the start of each run. A Nop transformer will +// substitute a nil value passed to tIn or tNotIn. Invalid UTF-8 is translated +// to RuneError to determine which transformer to apply, but is passed as is to +// the respective transformer. +func If(s Set, tIn, tNotIn transform.Transformer) Transformer { + if tIn == nil && tNotIn == nil { + return Transformer{transform.Nop} + } + if tIn == nil { + tIn = transform.Nop + } + if tNotIn == nil { + tNotIn = transform.Nop + } + sIn, ok := tIn.(transform.SpanningTransformer) + if !ok { + sIn = dummySpan{tIn} + } + sNotIn, ok := tNotIn.(transform.SpanningTransformer) + if !ok { + sNotIn = dummySpan{tNotIn} + } + + a := &cond{ + tIn: sIn, + tNotIn: sNotIn, + f: s.Contains, + } + a.Reset() + return Transformer{a} +} + +type dummySpan struct{ transform.Transformer } + +func (d dummySpan) Span(src []byte, atEOF bool) (n int, err error) { + return 0, transform.ErrEndOfSpan +} + +type cond struct { + tIn, tNotIn transform.SpanningTransformer + f func(rune) bool + check func(rune) bool // current check to perform + t transform.SpanningTransformer // current transformer to use +} + +// Reset implements transform.Transformer. +func (t *cond) Reset() { + t.check = t.is + t.t = t.tIn + t.t.Reset() // notIn will be reset on first usage. +} + +func (t *cond) is(r rune) bool { + if t.f(r) { + return true + } + t.check = t.isNot + t.t = t.tNotIn + t.tNotIn.Reset() + return false +} + +func (t *cond) isNot(r rune) bool { + if !t.f(r) { + return true + } + t.check = t.is + t.t = t.tIn + t.tIn.Reset() + return false +} + +// This implementation of Span doesn't help all too much, but it needs to be +// there to satisfy this package's Transformer interface. +// TODO: there are certainly room for improvements, though. 
For example, if +// t.t == transform.Nop (which will a common occurrence) it will save a bundle +// to special-case that loop. +func (t *cond) Span(src []byte, atEOF bool) (n int, err error) { + p := 0 + for n < len(src) && err == nil { + // Don't process too much at a time as the Spanner that will be + // called on this block may terminate early. + const maxChunk = 4096 + max := len(src) + if v := n + maxChunk; v < max { + max = v + } + atEnd := false + size := 0 + current := t.t + for ; p < max; p += size { + r := rune(src[p]) + if r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[p:]); size == 1 { + if !atEOF && !utf8.FullRune(src[p:]) { + err = transform.ErrShortSrc + break + } + } + if !t.check(r) { + // The next rune will be the start of a new run. + atEnd = true + break + } + } + n2, err2 := current.Span(src[n:p], atEnd || (atEOF && p == len(src))) + n += n2 + if err2 != nil { + return n, err2 + } + // At this point either err != nil or t.check will pass for the rune at p. + p = n + size + } + return n, err +} + +func (t *cond) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + p := 0 + for nSrc < len(src) && err == nil { + // Don't process too much at a time, as the work might be wasted if the + // destination buffer isn't large enough to hold the result or a + // transform returns an error early. + const maxChunk = 4096 + max := len(src) + if n := nSrc + maxChunk; n < len(src) { + max = n + } + atEnd := false + size := 0 + current := t.t + for ; p < max; p += size { + r := rune(src[p]) + if r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[p:]); size == 1 { + if !atEOF && !utf8.FullRune(src[p:]) { + err = transform.ErrShortSrc + break + } + } + if !t.check(r) { + // The next rune will be the start of a new run. 
+ atEnd = true + break + } + } + nDst2, nSrc2, err2 := current.Transform(dst[nDst:], src[nSrc:p], atEnd || (atEOF && p == len(src))) + nDst += nDst2 + nSrc += nSrc2 + if err2 != nil { + return nDst, nSrc, err2 + } + // At this point either err != nil or t.check will pass for the rune at p. + p = nSrc + size + } + return nDst, nSrc, err +} diff --git a/vendor/golang.org/x/text/runes/runes.go b/vendor/golang.org/x/text/runes/runes.go new file mode 100644 index 00000000..71933696 --- /dev/null +++ b/vendor/golang.org/x/text/runes/runes.go @@ -0,0 +1,355 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package runes provide transforms for UTF-8 encoded text. +package runes // import "golang.org/x/text/runes" + +import ( + "unicode" + "unicode/utf8" + + "golang.org/x/text/transform" +) + +// A Set is a collection of runes. +type Set interface { + // Contains returns true if r is contained in the set. + Contains(r rune) bool +} + +type setFunc func(rune) bool + +func (s setFunc) Contains(r rune) bool { + return s(r) +} + +// Note: using funcs here instead of wrapping types result in cleaner +// documentation and a smaller API. + +// In creates a Set with a Contains method that returns true for all runes in +// the given RangeTable. +func In(rt *unicode.RangeTable) Set { + return setFunc(func(r rune) bool { return unicode.Is(rt, r) }) +} + +// In creates a Set with a Contains method that returns true for all runes not +// in the given RangeTable. +func NotIn(rt *unicode.RangeTable) Set { + return setFunc(func(r rune) bool { return !unicode.Is(rt, r) }) +} + +// Predicate creates a Set with a Contains method that returns f(r). +func Predicate(f func(rune) bool) Set { + return setFunc(f) +} + +// Transformer implements the transform.Transformer interface. 
+type Transformer struct { + t transform.SpanningTransformer +} + +func (t Transformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + return t.t.Transform(dst, src, atEOF) +} + +func (t Transformer) Span(b []byte, atEOF bool) (n int, err error) { + return t.t.Span(b, atEOF) +} + +func (t Transformer) Reset() { t.t.Reset() } + +// Bytes returns a new byte slice with the result of converting b using t. It +// calls Reset on t. It returns nil if any error was found. This can only happen +// if an error-producing Transformer is passed to If. +func (t Transformer) Bytes(b []byte) []byte { + b, _, err := transform.Bytes(t, b) + if err != nil { + return nil + } + return b +} + +// String returns a string with the result of converting s using t. It calls +// Reset on t. It returns the empty string if any error was found. This can only +// happen if an error-producing Transformer is passed to If. +func (t Transformer) String(s string) string { + s, _, err := transform.String(t, s) + if err != nil { + return "" + } + return s +} + +// TODO: +// - Copy: copying strings and bytes in whole-rune units. +// - Validation (maybe) +// - Well-formed-ness (maybe) + +const runeErrorString = string(utf8.RuneError) + +// Remove returns a Transformer that removes runes r for which s.Contains(r). +// Illegal input bytes are replaced by RuneError before being passed to f. +func Remove(s Set) Transformer { + if f, ok := s.(setFunc); ok { + // This little trick cuts the running time of BenchmarkRemove for sets + // created by Predicate roughly in half. + // TODO: special-case RangeTables as well. + return Transformer{remove(f)} + } + return Transformer{remove(s.Contains)} +} + +// TODO: remove transform.RemoveFunc. + +type remove func(r rune) bool + +func (remove) Reset() {} + +// Span implements transform.Spanner. 
+func (t remove) Span(src []byte, atEOF bool) (n int, err error) { + for r, size := rune(0), 0; n < len(src); { + if r = rune(src[n]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[n:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + } else { + err = transform.ErrEndOfSpan + } + break + } + if t(r) { + err = transform.ErrEndOfSpan + break + } + n += size + } + return +} + +// Transform implements transform.Transformer. +func (t remove) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for r, size := rune(0), 0; nSrc < len(src); { + if r = rune(src[nSrc]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + // We replace illegal bytes with RuneError. Not doing so might + // otherwise turn a sequence of invalid UTF-8 into valid UTF-8. + // The resulting byte sequence may subsequently contain runes + // for which t(r) is true that were passed unnoticed. + if !t(utf8.RuneError) { + if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + } + nSrc++ + continue + } + if t(r) { + nSrc += size + continue + } + if nDst+size > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < size; i++ { + dst[nDst] = src[nSrc] + nDst++ + nSrc++ + } + } + return +} + +// Map returns a Transformer that maps the runes in the input using the given +// mapping. Illegal bytes in the input are converted to utf8.RuneError before +// being passed to the mapping func. +func Map(mapping func(rune) rune) Transformer { + return Transformer{mapper(mapping)} +} + +type mapper func(rune) rune + +func (mapper) Reset() {} + +// Span implements transform.Spanner. 
+func (t mapper) Span(src []byte, atEOF bool) (n int, err error) { + for r, size := rune(0), 0; n < len(src); n += size { + if r = rune(src[n]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[n:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + } else { + err = transform.ErrEndOfSpan + } + break + } + if t(r) != r { + err = transform.ErrEndOfSpan + break + } + } + return n, err +} + +// Transform implements transform.Transformer. +func (t mapper) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + var replacement rune + var b [utf8.UTFMax]byte + + for r, size := rune(0), 0; nSrc < len(src); { + if r = rune(src[nSrc]); r < utf8.RuneSelf { + if replacement = t(r); replacement < utf8.RuneSelf { + if nDst == len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst] = byte(replacement) + nDst++ + nSrc++ + continue + } + size = 1 + } else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + + if replacement = t(utf8.RuneError); replacement == utf8.RuneError { + if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + nSrc++ + continue + } + } else if replacement = t(r); replacement == r { + if nDst+size > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < size; i++ { + dst[nDst] = src[nSrc] + nDst++ + nSrc++ + } + continue + } + + n := utf8.EncodeRune(b[:], replacement) + + if nDst+n > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < n; i++ { + dst[nDst] = b[i] + nDst++ + } + nSrc += size + } + return +} + +// ReplaceIllFormed returns a transformer that replaces all input bytes that are +// not part of a well-formed UTF-8 code sequence with utf8.RuneError. 
+func ReplaceIllFormed() Transformer { + return Transformer{&replaceIllFormed{}} +} + +type replaceIllFormed struct{ transform.NopResetter } + +func (t replaceIllFormed) Span(src []byte, atEOF bool) (n int, err error) { + for n < len(src) { + // ASCII fast path. + if src[n] < utf8.RuneSelf { + n++ + continue + } + + r, size := utf8.DecodeRune(src[n:]) + + // Look for a valid non-ASCII rune. + if r != utf8.RuneError || size != 1 { + n += size + continue + } + + // Look for short source data. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + break + } + + // We have an invalid rune. + err = transform.ErrEndOfSpan + break + } + return n, err +} + +func (t replaceIllFormed) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for nSrc < len(src) { + // ASCII fast path. + if r := src[nSrc]; r < utf8.RuneSelf { + if nDst == len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst] = r + nDst++ + nSrc++ + continue + } + + // Look for a valid non-ASCII rune. + if _, size := utf8.DecodeRune(src[nSrc:]); size != 1 { + if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { + err = transform.ErrShortDst + break + } + nDst += size + nSrc += size + continue + } + + // Look for short source data. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + + // We have an invalid rune. + if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + nSrc++ + } + return nDst, nSrc, err +} diff --git a/vendor/golang.org/x/text/transform/transform.go b/vendor/golang.org/x/text/transform/transform.go new file mode 100644 index 00000000..520b9ada --- /dev/null +++ b/vendor/golang.org/x/text/transform/transform.go @@ -0,0 +1,705 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
// Package transform provides reader and writer wrappers that transform the
// bytes passing through as well as various transformations. Example
// transformations provided by other packages include normalization and
// conversion between character sets.
package transform // import "golang.org/x/text/transform"

import (
	"bytes"
	"errors"
	"io"
	"unicode/utf8"
)

var (
	// ErrShortDst means that the destination buffer was too short to
	// receive all of the transformed bytes.
	ErrShortDst = errors.New("transform: short destination buffer")

	// ErrShortSrc means that the source buffer has insufficient data to
	// complete the transformation.
	ErrShortSrc = errors.New("transform: short source buffer")

	// ErrEndOfSpan means that the input and output (the transformed input)
	// are not identical.
	ErrEndOfSpan = errors.New("transform: input and output are not identical")

	// errInconsistentByteCount means that Transform returned success (nil
	// error) but also returned nSrc inconsistent with the src argument.
	errInconsistentByteCount = errors.New("transform: inconsistent byte count returned")

	// errShortInternal means that an internal buffer is not large enough
	// to make progress and the Transform operation must be aborted.
	errShortInternal = errors.New("transform: short internal buffer")
)

// Transformer transforms bytes.
type Transformer interface {
	// Transform writes to dst the transformed bytes read from src, and
	// returns the number of dst bytes written and src bytes read. The
	// atEOF argument tells whether src represents the last bytes of the
	// input.
	//
	// Callers should always process the nDst bytes produced and account
	// for the nSrc bytes consumed before considering the error err.
	//
	// A nil error means that all of the transformed bytes (whether freshly
	// transformed from src or left over from previous Transform calls)
	// were written to dst. A nil error can be returned regardless of
	// whether atEOF is true. If err is nil then nSrc must equal len(src);
	// the converse is not necessarily true.
	//
	// ErrShortDst means that dst was too short to receive all of the
	// transformed bytes. ErrShortSrc means that src had insufficient data
	// to complete the transformation. If both conditions apply, then
	// either error may be returned. Other than the error conditions listed
	// here, implementations are free to report other errors that arise.
	Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error)

	// Reset resets the state and allows a Transformer to be reused.
	Reset()
}

// SpanningTransformer extends the Transformer interface with a Span method
// that determines how much of the input already conforms to the Transformer.
type SpanningTransformer interface {
	Transformer

	// Span returns a position in src such that transforming src[:n] results in
	// identical output src[:n] for these bytes. It does not necessarily return
	// the largest such n. The atEOF argument tells whether src represents the
	// last bytes of the input.
	//
	// Callers should always account for the n bytes consumed before
	// considering the error err.
	//
	// A nil error means that all input bytes are known to be identical to the
	// output produced by the Transformer. A nil error can be returned
	// regardless of whether atEOF is true. If err is nil, then n must
	// equal len(src); the converse is not necessarily true.
	//
	// ErrEndOfSpan means that the Transformer output may differ from the
	// input after n bytes. Note that n may be len(src), meaning that the output
	// would contain additional bytes after otherwise identical output.
	// ErrShortSrc means that src had insufficient data to determine whether the
	// remaining bytes would change. Other than the error conditions listed
	// here, implementations are free to report other errors that arise.
	//
	// Calling Span can modify the Transformer state as a side effect. In
	// effect, it does the transformation just as calling Transform would, only
	// without copying to a destination buffer and only up to a point it can
	// determine the input and output bytes are the same. This is obviously more
	// limited than calling Transform, but can be more efficient in terms of
	// copying and allocating buffers. Calls to Span and Transform may be
	// interleaved.
	Span(src []byte, atEOF bool) (n int, err error)
}

// NopResetter can be embedded by implementations of Transformer to add a nop
// Reset method.
type NopResetter struct{}

// Reset implements the Reset method of the Transformer interface.
func (NopResetter) Reset() {}

// Reader wraps another io.Reader by transforming the bytes read.
type Reader struct {
	r   io.Reader
	t   Transformer
	err error

	// dst[dst0:dst1] contains bytes that have been transformed by t but
	// not yet copied out via Read.
	dst        []byte
	dst0, dst1 int

	// src[src0:src1] contains bytes that have been read from r but not
	// yet transformed through t.
	src        []byte
	src0, src1 int

	// transformComplete is whether the transformation is complete,
	// regardless of whether or not it was successful.
	transformComplete bool
}

const defaultBufSize = 4096

// NewReader returns a new Reader that wraps r by transforming the bytes read
// via t. It calls Reset on t.
func NewReader(r io.Reader, t Transformer) *Reader {
	t.Reset()
	return &Reader{
		r:   r,
		t:   t,
		dst: make([]byte, defaultBufSize),
		src: make([]byte, defaultBufSize),
	}
}

// Read implements the io.Reader interface.
func (r *Reader) Read(p []byte) (int, error) {
	n, err := 0, error(nil)
	for {
		// Copy out any transformed bytes and return the final error if we are done.
		if r.dst0 != r.dst1 {
			n = copy(p, r.dst[r.dst0:r.dst1])
			r.dst0 += n
			if r.dst0 == r.dst1 && r.transformComplete {
				return n, r.err
			}
			return n, nil
		} else if r.transformComplete {
			return 0, r.err
		}

		// Try to transform some source bytes, or to flush the transformer if we
		// are out of source bytes. We do this even if r.r.Read returned an error.
		// As the io.Reader documentation says, "process the n > 0 bytes returned
		// before considering the error".
		if r.src0 != r.src1 || r.err != nil {
			r.dst0 = 0
			r.dst1, n, err = r.t.Transform(r.dst, r.src[r.src0:r.src1], r.err == io.EOF)
			r.src0 += n

			switch {
			case err == nil:
				if r.src0 != r.src1 {
					r.err = errInconsistentByteCount
				}
				// The Transform call was successful; we are complete if we
				// cannot read more bytes into src.
				r.transformComplete = r.err != nil
				continue
			case err == ErrShortDst && (r.dst1 != 0 || n != 0):
				// Make room in dst by copying out, and try again.
				continue
			case err == ErrShortSrc && r.src1-r.src0 != len(r.src) && r.err == nil:
				// Read more bytes into src via the code below, and try again.
			default:
				r.transformComplete = true
				// The reader error (r.err) takes precedence over the
				// transformer error (err) unless r.err is nil or io.EOF.
				if r.err == nil || r.err == io.EOF {
					r.err = err
				}
				continue
			}
		}

		// Move any untransformed source bytes to the start of the buffer
		// and read more bytes.
		if r.src0 != 0 {
			r.src0, r.src1 = 0, copy(r.src, r.src[r.src0:r.src1])
		}
		n, r.err = r.r.Read(r.src[r.src1:])
		r.src1 += n
	}
}

// TODO: implement ReadByte (and ReadRune??).

// Writer wraps another io.Writer by transforming the bytes written to it.
// The user needs to call Close to flush unwritten bytes that may
// be buffered.
type Writer struct {
	w   io.Writer
	t   Transformer
	dst []byte

	// src[:n] contains bytes that have not yet passed through t.
	src []byte
	n   int
}

// NewWriter returns a new Writer that wraps w by transforming the bytes written
// via t. It calls Reset on t.
func NewWriter(w io.Writer, t Transformer) *Writer {
	t.Reset()
	return &Writer{
		w:   w,
		t:   t,
		dst: make([]byte, defaultBufSize),
		src: make([]byte, defaultBufSize),
	}
}

// Write implements the io.Writer interface. If there are not enough
// bytes available to complete a Transform, the bytes will be buffered
// for the next write. Call Close to convert the remaining bytes.
func (w *Writer) Write(data []byte) (n int, err error) {
	src := data
	if w.n > 0 {
		// Append bytes from data to the last remainder.
		// TODO: limit the amount copied on first try.
		n = copy(w.src[w.n:], data)
		w.n += n
		src = w.src[:w.n]
	}
	for {
		nDst, nSrc, err := w.t.Transform(w.dst, src, false)
		if _, werr := w.w.Write(w.dst[:nDst]); werr != nil {
			return n, werr
		}
		src = src[nSrc:]
		if w.n == 0 {
			n += nSrc
		} else if len(src) <= n {
			// Enough bytes from w.src have been consumed. We make src point
			// to data instead to reduce the copying.
			w.n = 0
			n -= len(src)
			src = data[n:]
			if n < len(data) && (err == nil || err == ErrShortSrc) {
				continue
			}
		}
		switch err {
		case ErrShortDst:
			// This error is okay as long as we are making progress.
			if nDst > 0 || nSrc > 0 {
				continue
			}
		case ErrShortSrc:
			if len(src) < len(w.src) {
				m := copy(w.src, src)
				// If w.n > 0, bytes from data were already copied to w.src and n
				// was already set to the number of bytes consumed.
				if w.n == 0 {
					n += m
				}
				w.n = m
				err = nil
			} else if nDst > 0 || nSrc > 0 {
				// Not enough buffer to store the remainder. Keep processing as
				// long as there is progress. Without this case, transforms that
				// require a lookahead larger than the buffer may result in an
				// error. This is not something one may expect to be common in
				// practice, but it may occur when buffers are set to small
				// sizes during testing.
				continue
			}
		case nil:
			if w.n > 0 {
				err = errInconsistentByteCount
			}
		}
		return n, err
	}
}

// Close implements the io.Closer interface. It flushes any bytes still
// buffered in w through the Transformer with atEOF set.
func (w *Writer) Close() error {
	src := w.src[:w.n]
	for {
		nDst, nSrc, err := w.t.Transform(w.dst, src, true)
		if _, werr := w.w.Write(w.dst[:nDst]); werr != nil {
			return werr
		}
		if err != ErrShortDst {
			return err
		}
		src = src[nSrc:]
	}
}

// nop is the identity transformer: it copies src to dst unmodified.
type nop struct{ NopResetter }

func (nop) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	n := copy(dst, src)
	if n < len(src) {
		err = ErrShortDst
	}
	return n, n, err
}

func (nop) Span(src []byte, atEOF bool) (n int, err error) {
	return len(src), nil
}

// discard consumes all input and produces no output.
type discard struct{ NopResetter }

func (discard) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	return 0, len(src), nil
}

var (
	// Discard is a Transformer for which all Transform calls succeed
	// by consuming all bytes and writing nothing.
	Discard Transformer = discard{}

	// Nop is a SpanningTransformer that copies src to dst.
	Nop SpanningTransformer = nop{}
)

// chain is a sequence of links. A chain with N Transformers has N+1 links and
// N+1 buffers. Of those N+1 buffers, the first and last are the src and dst
// buffers given to chain.Transform and the middle N-1 buffers are intermediate
// buffers owned by the chain. The i'th link transforms bytes from the i'th
// buffer chain.link[i].b at read offset chain.link[i].p to the i+1'th buffer
// chain.link[i+1].b at write offset chain.link[i+1].n, for i in [0, N).
type chain struct {
	link []link
	err  error
	// errStart is the index at which the error occurred plus 1. Processing
	// resumes at this level at the next call to Transform. As long as
	// errStart > 0, chain will not consume any more source bytes.
	errStart int
}

// fatalError records err as the chain's sticky error if it occurred at or
// beyond the current errStart level; earlier levels keep their error.
func (c *chain) fatalError(errIndex int, err error) {
	if i := errIndex + 1; i > c.errStart {
		c.errStart = i
		c.err = err
	}
}

type link struct {
	t Transformer
	// b[p:n] holds the bytes to be transformed by t.
	b []byte
	p int
	n int
}

// src returns the bytes of this link still awaiting transformation.
func (l *link) src() []byte {
	return l.b[l.p:l.n]
}

// dst returns the free space of this link's buffer available for output.
func (l *link) dst() []byte {
	return l.b[l.n:]
}

// Chain returns a Transformer that applies t in sequence.
func Chain(t ...Transformer) Transformer {
	if len(t) == 0 {
		return nop{}
	}
	c := &chain{link: make([]link, len(t)+1)}
	for i, tt := range t {
		c.link[i].t = tt
	}
	// Allocate intermediate buffers.
	b := make([][defaultBufSize]byte, len(t)-1)
	for i := range b {
		c.link[i+1].b = b[i][:]
	}
	return c
}

// Reset resets the state of Chain. It calls Reset on all the Transformers.
func (c *chain) Reset() {
	for i, l := range c.link {
		if l.t != nil {
			l.t.Reset()
		}
		c.link[i].p, c.link[i].n = 0, 0
	}
}

// TODO: make chain use Span (is going to be fun to implement!)

// Transform applies the transformers of c in sequence.
func (c *chain) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	// Set up src and dst in the chain.
	srcL := &c.link[0]
	dstL := &c.link[len(c.link)-1]
	srcL.b, srcL.p, srcL.n = src, 0, len(src)
	dstL.b, dstL.n = dst, 0
	var lastFull, needProgress bool // for detecting progress

	// i is the index of the next Transformer to apply, for i in [low, high].
	// low is the lowest index for which c.link[low] may still produce bytes.
	// high is the highest index for which c.link[high] has a Transformer.
	// The error returned by Transform determines whether to increase or
	// decrease i. We try to completely fill a buffer before converting it.
	for low, i, high := c.errStart, c.errStart, len(c.link)-2; low <= i && i <= high; {
		in, out := &c.link[i], &c.link[i+1]
		nDst, nSrc, err0 := in.t.Transform(out.dst(), in.src(), atEOF && low == i)
		out.n += nDst
		in.p += nSrc
		if i > 0 && in.p == in.n {
			in.p, in.n = 0, 0
		}
		needProgress, lastFull = lastFull, false
		switch err0 {
		case ErrShortDst:
			// Process the destination buffer next. Return if we are already
			// at the high index.
			if i == high {
				return dstL.n, srcL.p, ErrShortDst
			}
			if out.n != 0 {
				i++
				// If the Transformer at the next index is not able to process any
				// source bytes there is nothing that can be done to make progress
				// and the bytes will remain unprocessed. lastFull is used to
				// detect this and break out of the loop with a fatal error.
				lastFull = true
				continue
			}
			// The destination buffer was too small, but is completely empty.
			// Return a fatal error as this transformation can never complete.
			c.fatalError(i, errShortInternal)
		case ErrShortSrc:
			if i == 0 {
				// Save ErrShortSrc in err. All other errors take precedence.
				err = ErrShortSrc
				break
			}
			// Source bytes were depleted before filling up the destination buffer.
			// Verify we made some progress, move the remaining bytes to the errStart
			// and try to get more source bytes.
			if needProgress && nSrc == 0 || in.n-in.p == len(in.b) {
				// There were not enough source bytes to proceed while the source
				// buffer cannot hold any more bytes. Return a fatal error as this
				// transformation can never complete.
				c.fatalError(i, errShortInternal)
				break
			}
			// in.b is an internal buffer and we can make progress.
			in.p, in.n = 0, copy(in.b, in.src())
			fallthrough
		case nil:
			// if i == low, we have depleted the bytes at index i or any lower levels.
			// In that case we increase low and i. In all other cases we decrease i to
			// fetch more bytes before proceeding to the next index.
			if i > low {
				i--
				continue
			}
		default:
			c.fatalError(i, err0)
		}
		// Exhausted level low or fatal error: increase low and continue
		// to process the bytes accepted so far.
		i++
		low = i
	}

	// If c.errStart > 0, this means we found a fatal error. We will clear
	// all upstream buffers. At this point, no more progress can be made
	// downstream, as Transform would have bailed while handling ErrShortDst.
	if c.errStart > 0 {
		for i := 1; i < c.errStart; i++ {
			c.link[i].p, c.link[i].n = 0, 0
		}
		err, c.errStart, c.err = c.err, 0, nil
	}
	return dstL.n, srcL.p, err
}

// RemoveFunc returns a Transformer that removes from the input all runes r
// for which f(r) is true.
//
// Deprecated: Use runes.Remove instead.
func RemoveFunc(f func(r rune) bool) Transformer {
	return removeF(f)
}

// removeF adapts a rune predicate into a Transformer.
type removeF func(r rune) bool

// Reset implements the Transformer interface; removeF is stateless.
func (removeF) Reset() {}

// Transform implements the Transformer interface.
func (t removeF) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	for r, sz := rune(0), 0; len(src) > 0; src = src[sz:] {

		if r = rune(src[0]); r < utf8.RuneSelf {
			sz = 1
		} else {
			r, sz = utf8.DecodeRune(src)

			if sz == 1 {
				// Invalid rune.
				if !atEOF && !utf8.FullRune(src) {
					err = ErrShortSrc
					break
				}
				// We replace illegal bytes with RuneError. Not doing so might
				// otherwise turn a sequence of invalid UTF-8 into valid UTF-8.
				// The resulting byte sequence may subsequently contain runes
				// for which t(r) is true that were passed unnoticed.
				if !t(r) {
					if nDst+3 > len(dst) {
						err = ErrShortDst
						break
					}
					nDst += copy(dst[nDst:], "\uFFFD")
				}
				nSrc++
				continue
			}
		}

		if !t(r) {
			if nDst+sz > len(dst) {
				err = ErrShortDst
				break
			}
			nDst += copy(dst[nDst:], src[:sz])
		}
		nSrc += sz
	}
	return
}

// grow returns a new []byte that is longer than b, and copies the first n bytes
// of b to the start of the new slice.
func grow(b []byte, n int) []byte {
	// Growth policy: small buffers jump to 64 bytes, medium ones double,
	// large ones grow by 50% to limit over-allocation.
	m := len(b)
	if m <= 32 {
		m = 64
	} else if m <= 256 {
		m *= 2
	} else {
		m += m >> 1
	}
	buf := make([]byte, m)
	copy(buf, b[:n])
	return buf
}

// initialBufSize is the starting chunk size used by String before falling
// back to grow.
const initialBufSize = 128

// String returns a string with the result of converting s[:n] using t, where
// n <= len(s). If err == nil, n will be len(s). It calls Reset on t.
func String(t Transformer, s string) (result string, n int, err error) {
	t.Reset()
	if s == "" {
		// Fast path for the common case for empty input. Results in about a
		// 86% reduction of running time for BenchmarkStringLowerEmpty.
		if _, _, err := t.Transform(nil, nil, true); err == nil {
			return "", 0, nil
		}
	}

	// Allocate only once. Note that both dst and src escape when passed to
	// Transform.
	buf := [2 * initialBufSize]byte{}
	dst := buf[:initialBufSize:initialBufSize]
	src := buf[initialBufSize : 2*initialBufSize]

	// The input string s is transformed in multiple chunks (starting with a
	// chunk size of initialBufSize). nDst and nSrc are per-chunk (or
	// per-Transform-call) indexes, pDst and pSrc are overall indexes.
	nDst, nSrc := 0, 0
	pDst, pSrc := 0, 0

	// pPrefix is the length of a common prefix: the first pPrefix bytes of the
	// result will equal the first pPrefix bytes of s. It is not guaranteed to
	// be the largest such value, but if pPrefix, len(result) and len(s) are
	// all equal after the final transform (i.e. calling Transform with atEOF
	// being true returned nil error) then we don't need to allocate a new
	// result string.
	pPrefix := 0
	for {
		// Invariant: pDst == pPrefix && pSrc == pPrefix.

		n := copy(src, s[pSrc:])
		nDst, nSrc, err = t.Transform(dst, src[:n], pSrc+n == len(s))
		pDst += nDst
		pSrc += nSrc

		// TODO: let transformers implement an optional Spanner interface, akin
		// to norm's QuickSpan. This would even allow us to avoid any allocation.
		if !bytes.Equal(dst[:nDst], src[:nSrc]) {
			break
		}
		pPrefix = pSrc
		if err == ErrShortDst {
			// A buffer can only be short if a transformer modifies its input.
			break
		} else if err == ErrShortSrc {
			if nSrc == 0 {
				// No progress was made.
				break
			}
			// Equal so far and !atEOF, so continue checking.
		} else if err != nil || pPrefix == len(s) {
			return string(s[:pPrefix]), pPrefix, err
		}
	}
	// Post-condition: pDst == pPrefix + nDst && pSrc == pPrefix + nSrc.

	// We have transformed the first pSrc bytes of the input s to become pDst
	// transformed bytes. Those transformed bytes are discontiguous: the first
	// pPrefix of them equal s[:pPrefix] and the last nDst of them equal
	// dst[:nDst]. We copy them around, into a new dst buffer if necessary, so
	// that they become one contiguous slice: dst[:pDst].
	if pPrefix != 0 {
		newDst := dst
		if pDst > len(newDst) {
			newDst = make([]byte, len(s)+nDst-nSrc)
		}
		copy(newDst[pPrefix:pDst], dst[:nDst])
		copy(newDst[:pPrefix], s[:pPrefix])
		dst = newDst
	}

	// Prevent duplicate Transform calls with atEOF being true at the end of
	// the input. Also return if we have an unrecoverable error.
	if (err == nil && pSrc == len(s)) ||
		(err != nil && err != ErrShortDst && err != ErrShortSrc) {
		return string(dst[:pDst]), pSrc, err
	}

	// Transform the remaining input, growing dst and src buffers as necessary.
	for {
		n := copy(src, s[pSrc:])
		nDst, nSrc, err := t.Transform(dst[pDst:], src[:n], pSrc+n == len(s))
		pDst += nDst
		pSrc += nSrc

		// If we got ErrShortDst or ErrShortSrc, do not grow as long as we can
		// make progress. This may avoid excessive allocations.
		if err == ErrShortDst {
			if nDst == 0 {
				dst = grow(dst, pDst)
			}
		} else if err == ErrShortSrc {
			if nSrc == 0 {
				src = grow(src, 0)
			}
		} else if err != nil || pSrc == len(s) {
			return string(dst[:pDst]), pSrc, err
		}
	}
}

// Bytes returns a new byte slice with the result of converting b[:n] using t,
// where n <= len(b). If err == nil, n will be len(b). It calls Reset on t.
func Bytes(t Transformer, b []byte) (result []byte, n int, err error) {
	return doAppend(t, 0, make([]byte, len(b)), b)
}

// Append appends the result of converting src[:n] using t to dst, where
// n <= len(src), If err == nil, n will be len(src). It calls Reset on t.
func Append(t Transformer, dst, src []byte) (result []byte, n int, err error) {
	if len(dst) == cap(dst) {
		n := len(src) + len(dst) // It is okay for this to be 0.
		b := make([]byte, n)
		dst = b[:copy(b, dst)]
	}
	return doAppend(t, len(dst), dst[:cap(dst)], src)
}

// doAppend is the shared implementation of Bytes and Append: it repeatedly
// applies t (with atEOF true), growing dst only when no progress is made.
func doAppend(t Transformer, pDst int, dst, src []byte) (result []byte, n int, err error) {
	t.Reset()
	pSrc := 0
	for {
		nDst, nSrc, err := t.Transform(dst[pDst:], src[pSrc:], true)
		pDst += nDst
		pSrc += nSrc
		if err != ErrShortDst {
			return dst[:pDst], pSrc, err
		}

		// Grow the destination buffer, but do not grow as long as we can make
		// progress. This may avoid excessive allocations.
		if nDst == 0 {
			dst = grow(dst, pDst)
		}
	}
}