-rw-r--r--  api/auth/context.go | 2
-rw-r--r--  api/graphql/graphql_test.go | 5
-rw-r--r--  api/graphql/models/lazy_bug.go | 6
-rw-r--r--  api/graphql/models/lazy_identity.go | 6
-rw-r--r--  api/graphql/resolvers/mutation.go | 6
-rw-r--r--  api/graphql/resolvers/repo.go | 14
-rw-r--r--  api/http/git_file_handlers_test.go | 4
-rw-r--r--  bridge/core/config.go | 7
-rw-r--r--  bridge/github/config.go | 12
-rw-r--r--  bridge/github/export.go | 9
-rw-r--r--  bridge/github/export_test.go | 29
-rw-r--r--  bridge/github/import.go | 15
-rw-r--r--  bridge/github/import_integration_test.go | 13
-rw-r--r--  bridge/github/import_test.go | 6
-rw-r--r--  bridge/gitlab/export.go | 9
-rw-r--r--  bridge/gitlab/export_test.go | 22
-rw-r--r--  bridge/gitlab/import.go | 11
-rw-r--r--  bridge/gitlab/import_test.go | 6
-rw-r--r--  bridge/jira/export.go | 9
-rw-r--r--  bridge/jira/import.go | 12
-rw-r--r--  bridge/launchpad/import.go | 13
-rw-r--r--  cache/bug_cache.go | 137
-rw-r--r--  cache/bug_excerpt.go | 48
-rw-r--r--  cache/bug_subcache.go | 254
-rw-r--r--  cache/cached.go | 111
-rw-r--r--  cache/filter.go | 58
-rw-r--r--  cache/identity_cache.go | 32
-rw-r--r--  cache/identity_excerpt.go | 21
-rw-r--r--  cache/identity_subcache.go | 124
-rw-r--r--  cache/lru_id_cache.go | 36
-rw-r--r--  cache/multi_repo_cache.go | 26
-rw-r--r--  cache/repo_cache.go | 232
-rw-r--r--  cache/repo_cache_bug.go | 556
-rw-r--r--  cache/repo_cache_common.go | 155
-rw-r--r--  cache/repo_cache_identity.go | 271
-rw-r--r--  cache/repo_cache_test.go | 170
-rw-r--r--  cache/resolvers.go | 42
-rw-r--r--  cache/subcache.go | 505
-rw-r--r--  cache/with_snapshot.go | 56
-rw-r--r--  commands/bridge/bridge_auth_addtoken.go | 2
-rw-r--r--  commands/bug/bug.go | 38
-rw-r--r--  commands/bug/bug_comment_edit.go | 2
-rw-r--r--  commands/bug/bug_new.go | 2
-rw-r--r--  commands/bug/bug_rm.go | 2
-rw-r--r--  commands/bug/bug_select.go | 2
-rw-r--r--  commands/bug/select/select.go | 11
-rw-r--r--  commands/bug/select/select_test.go | 10
-rw-r--r--  commands/bug/testenv/testenv.go | 6
-rw-r--r--  commands/cmdjson/json_common.go | 4
-rw-r--r--  commands/completion/helper_completion.go | 24
-rw-r--r--  commands/execenv/env.go | 19
-rw-r--r--  commands/execenv/env_testing.go | 6
-rw-r--r--  commands/label.go | 2
-rw-r--r--  commands/root.go | 2
-rw-r--r--  commands/user/user.go | 6
-rw-r--r--  commands/user/user_adopt.go | 2
-rw-r--r--  commands/user/user_new.go | 2
-rw-r--r--  commands/user/user_show.go | 2
-rw-r--r--  commands/webui.go | 18
-rw-r--r--  doc/README.md | 12
-rw-r--r--  entities/bug/bug.go | 57
-rw-r--r--  entities/bug/bug_actions.go | 23
-rw-r--r--  entities/bug/err.go | 17
-rw-r--r--  entities/bug/operation.go | 7
-rw-r--r--  entities/bug/resolver.go | 2
-rw-r--r--  entities/bug/snapshot.go | 4
-rw-r--r--  entities/bug/with_snapshot.go | 53
-rw-r--r--  entities/identity/common.go | 9
-rw-r--r--  entities/identity/identity.go | 38
-rw-r--r--  entities/identity/identity_actions.go | 4
-rw-r--r--  entities/identity/identity_actions_test.go | 5
-rw-r--r--  entities/identity/identity_test.go | 7
-rw-r--r--  entities/identity/identity_user.go | 2
-rw-r--r--  entities/identity/resolver.go | 15
-rw-r--r--  entity/dag/common_test.go | 12
-rw-r--r--  entity/dag/entity.go | 63
-rw-r--r--  entity/dag/entity_actions.go | 14
-rw-r--r--  entity/dag/entity_actions_test.go | 34
-rw-r--r--  entity/dag/entity_test.go | 12
-rw-r--r--  entity/dag/example_test.go | 14
-rw-r--r--  entity/dag/interface.go | 6
-rw-r--r--  entity/dag/op_set_metadata_test.go | 6
-rw-r--r--  entity/dag/operation.go | 9
-rw-r--r--  entity/err.go | 33
-rw-r--r--  entity/interface.go | 2
-rw-r--r--  entity/resolver.go | 29
-rw-r--r--  entity/streamed.go | 6
-rw-r--r--  go.mod | 12
-rw-r--r--  go.sum | 22
-rw-r--r--  repository/gogit.go | 156
-rw-r--r--  repository/gogit_test.go | 11
-rw-r--r--  repository/hash.go | 2
-rw-r--r--  repository/index_bleve.go | 154
-rw-r--r--  repository/mock_repo.go | 142
-rw-r--r--  repository/repo.go | 54
-rw-r--r--  repository/repo_testing.go | 84
-rw-r--r--  termui/bug_table.go | 12
-rw-r--r--  termui/label_select.go | 2
-rw-r--r--  termui/termui.go | 2
-rw-r--r--  util/multierr/errwaitgroup.go | 115
-rw-r--r--  util/multierr/join.go | 51
101 files changed, 2476 insertions, 2050 deletions
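
Most of the hunks below are a mechanical migration of call sites from the flat RepoCache API to per-entity sub-caches reached through Bugs() and Identities(), backed by the new generic cache/subcache.go and cache/cached.go. A minimal sketch of the call-site pattern, using only identifiers that appear in this diff (imports, setup and error handling elided; "repo" stands for a *cache.RepoCache):

    // Old API (removed by this commit):
    //   b, err := repo.ResolveBug(id)
    //   i, err := repo.ResolveIdentity(id)
    //
    // New API, through the namespaced sub-caches:
    b, err := repo.Bugs().Resolve(id)
    i, err := repo.Identities().Resolve(id)

The same renaming applies to the other entry points (NewBug -> Bugs().New, AllBugsIds -> Bugs().AllIds, ResolveIdentityExcerpt -> Identities().ResolveExcerpt, and so on), as the individual file diffs show.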
diff --git a/api/auth/context.go b/api/auth/context.go
index 17171261..2547aaca 100644
--- a/api/auth/context.go
+++ b/api/auth/context.go
@@ -24,5 +24,5 @@ func UserFromCtx(ctx context.Context, r *cache.RepoCache) (*cache.IdentityCache,
if !ok {
return nil, ErrNotAuthenticated
}
- return r.ResolveIdentity(id)
+ return r.Identities().Resolve(id)
}
diff --git a/api/graphql/graphql_test.go b/api/graphql/graphql_test.go
index 2ddfb314..a8dfad3f 100644
--- a/api/graphql/graphql_test.go
+++ b/api/graphql/graphql_test.go
@@ -19,8 +19,11 @@ func TestQueries(t *testing.T) {
random_bugs.FillRepoWithSeed(repo, 10, 42)
mrc := cache.NewMultiRepoCache()
- _, err := mrc.RegisterDefaultRepository(repo)
+ _, events, err := mrc.RegisterDefaultRepository(repo)
require.NoError(t, err)
+ for event := range events {
+ require.NoError(t, event.Err)
+ }
handler := NewHandler(mrc, nil)
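
As the updated test shows, RegisterDefaultRepository now also returns a channel of build events that callers drain to surface loading/indexing errors. A short caller-side sketch, assuming (as the test's range loop implies) that the channel is closed once the cache build completes:

    mrc := cache.NewMultiRepoCache()
    repoCache, events, err := mrc.RegisterDefaultRepository(repo)
    if err != nil {
        return err
    }
    for event := range events {
        if event.Err != nil {
            return event.Err // an entity failed to load or index
        }
    }
    // from here on, repoCache is fully built and ready to use
    _ = repoCache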
diff --git a/api/graphql/models/lazy_bug.go b/api/graphql/models/lazy_bug.go
index 318fdc99..4b0b598e 100644
--- a/api/graphql/models/lazy_bug.go
+++ b/api/graphql/models/lazy_bug.go
@@ -58,7 +58,7 @@ func (lb *lazyBug) load() error {
return nil
}
- b, err := lb.cache.ResolveBug(lb.excerpt.Id)
+ b, err := lb.cache.Bugs().Resolve(lb.excerpt.Id())
if err != nil {
return err
}
@@ -68,7 +68,7 @@ func (lb *lazyBug) load() error {
}
func (lb *lazyBug) identity(id entity.Id) (IdentityWrapper, error) {
- i, err := lb.cache.ResolveIdentityExcerpt(id)
+ i, err := lb.cache.Identities().ResolveExcerpt(id)
if err != nil {
return nil, err
}
@@ -79,7 +79,7 @@ func (lb *lazyBug) identity(id entity.Id) (IdentityWrapper, error) {
func (lb *lazyBug) IsAuthored() {}
func (lb *lazyBug) Id() entity.Id {
- return lb.excerpt.Id
+ return lb.excerpt.Id()
}
func (lb *lazyBug) LastEdit() time.Time {
diff --git a/api/graphql/models/lazy_identity.go b/api/graphql/models/lazy_identity.go
index 27bc9619..c19d077b 100644
--- a/api/graphql/models/lazy_identity.go
+++ b/api/graphql/models/lazy_identity.go
@@ -48,16 +48,16 @@ func (li *lazyIdentity) load() (*cache.IdentityCache, error) {
return li.id, nil
}
- id, err := li.cache.ResolveIdentity(li.excerpt.Id)
+ id, err := li.cache.Identities().Resolve(li.excerpt.Id())
if err != nil {
- return nil, fmt.Errorf("cache: missing identity %v", li.excerpt.Id)
+ return nil, fmt.Errorf("cache: missing identity %v", li.excerpt.Id())
}
li.id = id
return id, nil
}
func (li *lazyIdentity) Id() entity.Id {
- return li.excerpt.Id
+ return li.excerpt.Id()
}
func (li *lazyIdentity) Name() string {
diff --git a/api/graphql/resolvers/mutation.go b/api/graphql/resolvers/mutation.go
index 3f9f7fe1..32e1fa7c 100644
--- a/api/graphql/resolvers/mutation.go
+++ b/api/graphql/resolvers/mutation.go
@@ -32,7 +32,7 @@ func (r mutationResolver) getBug(repoRef *string, bugPrefix string) (*cache.Repo
return nil, nil, err
}
- b, err := repo.ResolveBugPrefix(bugPrefix)
+ b, err := repo.Bugs().ResolvePrefix(bugPrefix)
if err != nil {
return nil, nil, err
}
@@ -50,7 +50,7 @@ func (r mutationResolver) NewBug(ctx context.Context, input models.NewBugInput)
return nil, err
}
- b, op, err := repo.NewBugRaw(author,
+ b, op, err := repo.Bugs().NewRaw(author,
time.Now().Unix(),
text.CleanupOneLine(input.Title),
text.Cleanup(input.Message),
@@ -181,7 +181,7 @@ func (r mutationResolver) EditComment(ctx context.Context, input models.EditComm
return nil, err
}
- b, target, err := repo.ResolveComment(input.TargetPrefix)
+ b, target, err := repo.Bugs().ResolveComment(input.TargetPrefix)
if err != nil {
return nil, err
}
diff --git a/api/graphql/resolvers/repo.go b/api/graphql/resolvers/repo.go
index 3fcaada1..67b03628 100644
--- a/api/graphql/resolvers/repo.go
+++ b/api/graphql/resolvers/repo.go
@@ -41,7 +41,7 @@ func (repoResolver) AllBugs(_ context.Context, obj *models.Repository, after *st
}
// Simply pass a []string with the ids to the pagination algorithm
- source, err := obj.Repo.QueryBugs(q)
+ source, err := obj.Repo.Bugs().Query(q)
if err != nil {
return nil, err
}
@@ -60,7 +60,7 @@ func (repoResolver) AllBugs(_ context.Context, obj *models.Repository, after *st
nodes := make([]models.BugWrapper, len(lazyBugEdges))
for i, lazyBugEdge := range lazyBugEdges {
- excerpt, err := obj.Repo.ResolveBugExcerpt(lazyBugEdge.Id)
+ excerpt, err := obj.Repo.Bugs().ResolveExcerpt(lazyBugEdge.Id)
if err != nil {
return nil, err
}
@@ -86,7 +86,7 @@ func (repoResolver) AllBugs(_ context.Context, obj *models.Repository, after *st
}
func (repoResolver) Bug(_ context.Context, obj *models.Repository, prefix string) (models.BugWrapper, error) {
- excerpt, err := obj.Repo.ResolveBugExcerptPrefix(prefix)
+ excerpt, err := obj.Repo.Bugs().ResolveExcerptPrefix(prefix)
if err != nil {
return nil, err
}
@@ -103,7 +103,7 @@ func (repoResolver) AllIdentities(_ context.Context, obj *models.Repository, aft
}
// Simply pass a []string with the ids to the pagination algorithm
- source := obj.Repo.AllIdentityIds()
+ source := obj.Repo.Identities().AllIds()
// The edger create a custom edge holding just the id
edger := func(id entity.Id, offset int) connections.Edge {
@@ -119,7 +119,7 @@ func (repoResolver) AllIdentities(_ context.Context, obj *models.Repository, aft
nodes := make([]models.IdentityWrapper, len(lazyIdentityEdges))
for k, lazyIdentityEdge := range lazyIdentityEdges {
- excerpt, err := obj.Repo.ResolveIdentityExcerpt(lazyIdentityEdge.Id)
+ excerpt, err := obj.Repo.Identities().ResolveExcerpt(lazyIdentityEdge.Id)
if err != nil {
return nil, err
}
@@ -145,7 +145,7 @@ func (repoResolver) AllIdentities(_ context.Context, obj *models.Repository, aft
}
func (repoResolver) Identity(_ context.Context, obj *models.Repository, prefix string) (models.IdentityWrapper, error) {
- excerpt, err := obj.Repo.ResolveIdentityExcerptPrefix(prefix)
+ excerpt, err := obj.Repo.Identities().ResolveExcerptPrefix(prefix)
if err != nil {
return nil, err
}
@@ -187,5 +187,5 @@ func (repoResolver) ValidLabels(_ context.Context, obj *models.Repository, after
}, nil
}
- return connections.LabelCon(obj.Repo.ValidLabels(), edger, conMaker, input)
+ return connections.LabelCon(obj.Repo.Bugs().ValidLabels(), edger, conMaker, input)
}
diff --git a/api/http/git_file_handlers_test.go b/api/http/git_file_handlers_test.go
index 736bf75e..b2371abf 100644
--- a/api/http/git_file_handlers_test.go
+++ b/api/http/git_file_handlers_test.go
@@ -22,10 +22,10 @@ func TestGitFileHandlers(t *testing.T) {
repo := repository.CreateGoGitTestRepo(t, false)
mrc := cache.NewMultiRepoCache()
- repoCache, err := mrc.RegisterDefaultRepository(repo)
+ repoCache, _, err := mrc.RegisterDefaultRepository(repo)
require.NoError(t, err)
- author, err := repoCache.NewIdentity("test identity", "test@test.org")
+ author, err := repoCache.Identities().New("test identity", "test@test.org")
require.NoError(t, err)
err = repoCache.SetUserIdentity(author)
diff --git a/bridge/core/config.go b/bridge/core/config.go
index 45f1afa4..ed079eb8 100644
--- a/bridge/core/config.go
+++ b/bridge/core/config.go
@@ -5,12 +5,13 @@ import (
"github.com/MichaelMure/git-bug/cache"
"github.com/MichaelMure/git-bug/entities/identity"
+ "github.com/MichaelMure/git-bug/entity"
)
func FinishConfig(repo *cache.RepoCache, metaKey string, login string) error {
// if no user exist with the given login metadata
- _, err := repo.ResolveIdentityImmutableMetadata(metaKey, login)
- if err != nil && err != identity.ErrIdentityNotExist {
+ _, err := repo.Identities().ResolveIdentityImmutableMetadata(metaKey, login)
+ if err != nil && !entity.IsErrNotFound(err) {
// real error
return err
}
@@ -33,7 +34,7 @@ func FinishConfig(repo *cache.RepoCache, metaKey string, login string) error {
}
// otherwise create a user with that metadata
- i, err := repo.NewIdentityFromGitUserRaw(map[string]string{
+ i, err := repo.Identities().NewFromGitUserRaw(map[string]string{
metaKey: login,
})
if err != nil {
diff --git a/bridge/github/config.go b/bridge/github/config.go
index 6b847394..2f5d1f3b 100644
--- a/bridge/github/config.go
+++ b/bridge/github/config.go
@@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"io/ioutil"
- "math/rand"
"net/http"
"net/url"
"regexp"
@@ -319,17 +318,6 @@ func pollGithubForAuthorization(deviceCode string, intervalSec int64) (string, e
}
}
-func randomFingerprint() string {
- // Doesn't have to be crypto secure, it's just to avoid token collision
- rand.Seed(time.Now().UnixNano())
- var letterRunes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
- b := make([]rune, 32)
- for i := range b {
- b[i] = letterRunes[rand.Intn(len(letterRunes))]
- }
- return string(b)
-}
-
func promptTokenOptions(repo repository.RepoKeyring, login, owner, project string) (auth.Credential, error) {
creds, err := auth.List(repo,
auth.WithTarget(target),
diff --git a/bridge/github/export.go b/bridge/github/export.go
index 675ed039..0d340b49 100644
--- a/bridge/github/export.go
+++ b/bridge/github/export.go
@@ -20,7 +20,6 @@ import (
"github.com/MichaelMure/git-bug/cache"
"github.com/MichaelMure/git-bug/entities/bug"
"github.com/MichaelMure/git-bug/entities/common"
- "github.com/MichaelMure/git-bug/entities/identity"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/entity/dag"
)
@@ -89,8 +88,8 @@ func (ge *githubExporter) cacheAllClient(repo *cache.RepoCache) error {
continue
}
- user, err := repo.ResolveIdentityImmutableMetadata(metaKeyGithubLogin, login)
- if err == identity.ErrIdentityNotExist {
+ user, err := repo.Identities().ResolveIdentityImmutableMetadata(metaKeyGithubLogin, login)
+ if entity.IsErrNotFound(err) {
continue
}
if err != nil {
@@ -160,10 +159,10 @@ func (ge *githubExporter) ExportAll(ctx context.Context, repo *cache.RepoCache,
allIdentitiesIds = append(allIdentitiesIds, id)
}
- allBugsIds := repo.AllBugsIds()
+ allBugsIds := repo.Bugs().AllIds()
for _, id := range allBugsIds {
- b, err := repo.ResolveBug(id)
+ b, err := repo.Bugs().Resolve(id)
if err != nil {
out <- core.NewExportError(errors.Wrap(err, "can't load bug"), id)
return
diff --git a/bridge/github/export_test.go b/bridge/github/export_test.go
index 2ebe9622..e06457d4 100644
--- a/bridge/github/export_test.go
+++ b/bridge/github/export_test.go
@@ -34,18 +34,18 @@ type testCase struct {
func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
// simple bug
- simpleBug, _, err := repo.NewBug("simple bug", "new bug")
+ simpleBug, _, err := repo.Bugs().New("simple bug", "new bug")
require.NoError(t, err)
// bug with comments
- bugWithComments, _, err := repo.NewBug("bug with comments", "new bug")
+ bugWithComments, _, err := repo.Bugs().New("bug with comments", "new bug")
require.NoError(t, err)
_, _, err = bugWithComments.AddComment("new comment")
require.NoError(t, err)
// bug with label changes
- bugLabelChange, _, err := repo.NewBug("bug label change", "new bug")
+ bugLabelChange, _, err := repo.Bugs().New("bug label change", "new bug")
require.NoError(t, err)
_, _, err = bugLabelChange.ChangeLabels([]string{"bug"}, nil)
@@ -64,7 +64,7 @@ func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
require.NoError(t, err)
// bug with comments editions
- bugWithCommentEditions, createOp, err := repo.NewBug("bug with comments editions", "new bug")
+ bugWithCommentEditions, createOp, err := repo.Bugs().New("bug with comments editions", "new bug")
require.NoError(t, err)
_, err = bugWithCommentEditions.EditComment(
@@ -78,7 +78,7 @@ func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
require.NoError(t, err)
// bug status changed
- bugStatusChanged, _, err := repo.NewBug("bug status changed", "new bug")
+ bugStatusChanged, _, err := repo.Bugs().New("bug status changed", "new bug")
require.NoError(t, err)
_, err = bugStatusChanged.Close()
@@ -88,7 +88,7 @@ func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
require.NoError(t, err)
// bug title changed
- bugTitleEdited, _, err := repo.NewBug("bug title edited", "new bug")
+ bugTitleEdited, _, err := repo.Bugs().New("bug title edited", "new bug")
require.NoError(t, err)
_, err = bugTitleEdited.SetTitle("bug title edited again")
@@ -141,12 +141,12 @@ func TestGithubPushPull(t *testing.T) {
// create repo backend
repo := repository.CreateGoGitTestRepo(t, false)
- backend, err := cache.NewRepoCache(repo)
+ backend, err := cache.NewRepoCacheNoEvents(repo)
require.NoError(t, err)
// set author identity
login := "identity-test"
- author, err := backend.NewIdentity("test identity", "test@test.org")
+ author, err := backend.Identities().New("test identity", "test@test.org")
require.NoError(t, err)
author.SetMetadata(metaKeyGithubLogin, login)
err = author.Commit()
@@ -158,11 +158,18 @@ func TestGithubPushPull(t *testing.T) {
defer backend.Close()
interrupt.RegisterCleaner(backend.Close)
+ // Setup token + cleanup
token := auth.NewToken(target, envToken)
token.SetMetadata(auth.MetaKeyLogin, login)
err = auth.Store(repo, token)
require.NoError(t, err)
+ cleanToken := func() error {
+ return auth.Remove(repo, token.ID())
+ }
+ defer cleanToken()
+ interrupt.RegisterCleaner(cleanToken)
+
tests := testCases(t, backend)
// generate project name
@@ -217,7 +224,7 @@ func TestGithubPushPull(t *testing.T) {
repoTwo := repository.CreateGoGitTestRepo(t, false)
// create a second backend
- backendTwo, err := cache.NewRepoCache(repoTwo)
+ backendTwo, err := cache.NewRepoCacheNoEvents(repoTwo)
require.NoError(t, err)
importer := &githubImporter{}
@@ -236,7 +243,7 @@ func TestGithubPushPull(t *testing.T) {
require.NoError(t, result.Err)
}
- require.Len(t, backendTwo.AllBugsIds(), len(tests))
+ require.Len(t, backendTwo.Bugs().AllIds(), len(tests))
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
@@ -261,7 +268,7 @@ func TestGithubPushPull(t *testing.T) {
require.True(t, ok)
// retrieve bug from backendTwo
- importedBug, err := backendTwo.ResolveBugCreateMetadata(metaKeyGithubId, bugGithubID)
+ importedBug, err := backendTwo.Bugs().ResolveBugCreateMetadata(metaKeyGithubId, bugGithubID)
require.NoError(t, err)
// verify bug have same number of original operations
diff --git a/bridge/github/import.go b/bridge/github/import.go
index 7ccac3fb..4a51d117 100644
--- a/bridge/github/import.go
+++ b/bridge/github/import.go
@@ -10,7 +10,6 @@ import (
"github.com/MichaelMure/git-bug/bridge/core"
"github.com/MichaelMure/git-bug/bridge/core/auth"
"github.com/MichaelMure/git-bug/cache"
- "github.com/MichaelMure/git-bug/entities/bug"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/util/text"
)
@@ -183,14 +182,14 @@ func (gi *githubImporter) ensureIssue(ctx context.Context, repo *cache.RepoCache
}
// resolve bug
- b, err := repo.ResolveBugMatcher(func(excerpt *cache.BugExcerpt) bool {
+ b, err := repo.Bugs().ResolveMatcher(func(excerpt *cache.BugExcerpt) bool {
return excerpt.CreateMetadata[metaKeyGithubUrl] == issue.Url.String() &&
excerpt.CreateMetadata[metaKeyGithubId] == parseId(issue.Id)
})
if err == nil {
return b, nil
}
- if err != bug.ErrBugNotExist {
+ if !entity.IsErrNotFound(err) {
return nil, err
}
@@ -213,7 +212,7 @@ func (gi *githubImporter) ensureIssue(ctx context.Context, repo *cache.RepoCache
}
// create bug
- b, _, err = repo.NewBugRaw(
+ b, _, err = repo.Bugs().NewRaw(
author,
issue.CreatedAt.Unix(),
text.CleanupOneLine(title), // TODO: this is the *current* title, not the original one
@@ -498,7 +497,7 @@ func (gi *githubImporter) ensurePerson(ctx context.Context, repo *cache.RepoCach
}
// Look first in the cache
- i, err := repo.ResolveIdentityImmutableMetadata(metaKeyGithubLogin, string(actor.Login))
+ i, err := repo.Identities().ResolveIdentityImmutableMetadata(metaKeyGithubLogin, string(actor.Login))
if err == nil {
return i, nil
}
@@ -531,7 +530,7 @@ func (gi *githubImporter) ensurePerson(ctx context.Context, repo *cache.RepoCach
name = string(actor.Login)
}
- i, err = repo.NewIdentityRaw(
+ i, err = repo.Identities().NewRaw(
name,
email,
string(actor.Login),
@@ -553,7 +552,7 @@ func (gi *githubImporter) ensurePerson(ctx context.Context, repo *cache.RepoCach
func (gi *githubImporter) getGhost(ctx context.Context, repo *cache.RepoCache) (*cache.IdentityCache, error) {
loginName := "ghost"
// Look first in the cache
- i, err := repo.ResolveIdentityImmutableMetadata(metaKeyGithubLogin, loginName)
+ i, err := repo.Identities().ResolveIdentityImmutableMetadata(metaKeyGithubLogin, loginName)
if err == nil {
return i, nil
}
@@ -568,7 +567,7 @@ func (gi *githubImporter) getGhost(ctx context.Context, repo *cache.RepoCache) (
if user.Name != nil {
userName = string(*user.Name)
}
- return repo.NewIdentityRaw(
+ return repo.Identities().NewRaw(
userName,
"",
string(user.Login),
diff --git a/bridge/github/import_integration_test.go b/bridge/github/import_integration_test.go
index 50cbd5c8..8c411d8d 100644
--- a/bridge/github/import_integration_test.go
+++ b/bridge/github/import_integration_test.go
@@ -34,8 +34,9 @@ func TestGithubImporterIntegration(t *testing.T) {
// arrange
repo := repository.CreateGoGitTestRepo(t, false)
- backend, err := cache.NewRepoCache(repo)
+ backend, err := cache.NewRepoCacheNoEvents(repo)
require.NoError(t, err)
+
defer backend.Close()
interrupt.RegisterCleaner(backend.Close)
require.NoError(t, err)
@@ -48,17 +49,17 @@ func TestGithubImporterIntegration(t *testing.T) {
for e := range events {
require.NoError(t, e.Err)
}
- require.Len(t, backend.AllBugsIds(), 5)
- require.Len(t, backend.AllIdentityIds(), 2)
+ require.Len(t, backend.Bugs().AllIds(), 5)
+ require.Len(t, backend.Identities().AllIds(), 2)
- b1, err := backend.ResolveBugCreateMetadata(metaKeyGithubUrl, "https://github.com/marcus/to-himself/issues/1")
+ b1, err := backend.Bugs().ResolveBugCreateMetadata(metaKeyGithubUrl, "https://github.com/marcus/to-himself/issues/1")
require.NoError(t, err)
ops1 := b1.Snapshot().Operations
require.Equal(t, "marcus", ops1[0].Author().Name())
require.Equal(t, "title 1", ops1[0].(*bug.CreateOperation).Title)
require.Equal(t, "body text 1", ops1[0].(*bug.CreateOperation).Message)
- b3, err := backend.ResolveBugCreateMetadata(metaKeyGithubUrl, "https://github.com/marcus/to-himself/issues/3")
+ b3, err := backend.Bugs().ResolveBugCreateMetadata(metaKeyGithubUrl, "https://github.com/marcus/to-himself/issues/3")
require.NoError(t, err)
ops3 := b3.Snapshot().Operations
require.Equal(t, "issue 3 comment 1", ops3[1].(*bug.AddCommentOperation).Message)
@@ -66,7 +67,7 @@ func TestGithubImporterIntegration(t *testing.T) {
require.Equal(t, []bug.Label{"bug"}, ops3[3].(*bug.LabelChangeOperation).Added)
require.Equal(t, "title 3, edit 1", ops3[4].(*bug.SetTitleOperation).Title)
- b4, err := backend.ResolveBugCreateMetadata(metaKeyGithubUrl, "https://github.com/marcus/to-himself/issues/4")
+ b4, err := backend.Bugs().ResolveBugCreateMetadata(metaKeyGithubUrl, "https://github.com/marcus/to-himself/issues/4")
require.NoError(t, err)
ops4 := b4.Snapshot().Operations
require.Equal(t, "edited", ops4[1].(*bug.EditCommentOperation).Message)
diff --git a/bridge/github/import_test.go b/bridge/github/import_test.go
index 5575de98..5fafcce1 100644
--- a/bridge/github/import_test.go
+++ b/bridge/github/import_test.go
@@ -28,7 +28,7 @@ func TestGithubImporter(t *testing.T) {
repo := repository.CreateGoGitTestRepo(t, false)
- backend, err := cache.NewRepoCache(repo)
+ backend, err := cache.NewRepoCacheNoEvents(repo)
require.NoError(t, err)
defer backend.Close()
@@ -171,11 +171,11 @@ func TestGithubImporter(t *testing.T) {
fmt.Printf("test repository imported in %f seconds\n", time.Since(start).Seconds())
- require.Len(t, backend.AllBugsIds(), len(tests))
+ require.Len(t, backend.Bugs().AllIds(), len(tests))
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- b, err := backend.ResolveBugCreateMetadata(metaKeyGithubUrl, tt.url)
+ b, err := backend.Bugs().ResolveBugCreateMetadata(metaKeyGithubUrl, tt.url)
require.NoError(t, err)
ops := b.Snapshot().Operations
diff --git a/bridge/gitlab/export.go b/bridge/gitlab/export.go
index 83465428..b3a02447 100644
--- a/bridge/gitlab/export.go
+++ b/bridge/gitlab/export.go
@@ -15,7 +15,6 @@ import (
"github.com/MichaelMure/git-bug/cache"
"github.com/MichaelMure/git-bug/entities/bug"
"github.com/MichaelMure/git-bug/entities/common"
- "github.com/MichaelMure/git-bug/entities/identity"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/entity/dag"
)
@@ -74,8 +73,8 @@ func (ge *gitlabExporter) cacheAllClient(repo *cache.RepoCache, baseURL string)
continue
}
- user, err := repo.ResolveIdentityImmutableMetadata(metaKeyGitlabLogin, login)
- if err == identity.ErrIdentityNotExist {
+ user, err := repo.Identities().ResolveIdentityImmutableMetadata(metaKeyGitlabLogin, login)
+ if entity.IsErrNotFound(err) {
continue
}
if err != nil {
@@ -116,14 +115,14 @@ func (ge *gitlabExporter) ExportAll(ctx context.Context, repo *cache.RepoCache,
allIdentitiesIds = append(allIdentitiesIds, id)
}
- allBugsIds := repo.AllBugsIds()
+ allBugsIds := repo.Bugs().AllIds()
for _, id := range allBugsIds {
select {
case <-ctx.Done():
return
default:
- b, err := repo.ResolveBug(id)
+ b, err := repo.Bugs().Resolve(id)
if err != nil {
out <- core.NewExportError(err, id)
return
diff --git a/bridge/gitlab/export_test.go b/bridge/gitlab/export_test.go
index 47d5a9b1..7c826822 100644
--- a/bridge/gitlab/export_test.go
+++ b/bridge/gitlab/export_test.go
@@ -37,18 +37,18 @@ type testCase struct {
func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
// simple bug
- simpleBug, _, err := repo.NewBug("simple bug", "new bug")
+ simpleBug, _, err := repo.Bugs().New("simple bug", "new bug")
require.NoError(t, err)
// bug with comments
- bugWithComments, _, err := repo.NewBug("bug with comments", "new bug")
+ bugWithComments, _, err := repo.Bugs().New("bug with comments", "new bug")
require.NoError(t, err)
_, _, err = bugWithComments.AddComment("new comment")
require.NoError(t, err)
// bug with label changes
- bugLabelChange, _, err := repo.NewBug("bug label change", "new bug")
+ bugLabelChange, _, err := repo.Bugs().New("bug label change", "new bug")
require.NoError(t, err)
_, _, err = bugLabelChange.ChangeLabels([]string{"bug"}, nil)
@@ -61,7 +61,7 @@ func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
require.NoError(t, err)
// bug with comments editions
- bugWithCommentEditions, createOp, err := repo.NewBug("bug with comments editions", "new bug")
+ bugWithCommentEditions, createOp, err := repo.Bugs().New("bug with comments editions", "new bug")
require.NoError(t, err)
_, err = bugWithCommentEditions.EditComment(
@@ -75,7 +75,7 @@ func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
require.NoError(t, err)
// bug status changed
- bugStatusChanged, _, err := repo.NewBug("bug status changed", "new bug")
+ bugStatusChanged, _, err := repo.Bugs().New("bug status changed", "new bug")
require.NoError(t, err)
_, err = bugStatusChanged.Close()
@@ -85,7 +85,7 @@ func testCases(t *testing.T, repo *cache.RepoCache) []*testCase {
require.NoError(t, err)
// bug title changed
- bugTitleEdited, _, err := repo.NewBug("bug title edited", "new bug")
+ bugTitleEdited, _, err := repo.Bugs().New("bug title edited", "new bug")
require.NoError(t, err)
_, err = bugTitleEdited.SetTitle("bug title edited again")
@@ -147,12 +147,12 @@ func TestGitlabPushPull(t *testing.T) {
// create repo backend
repo := repository.CreateGoGitTestRepo(t, false)
- backend, err := cache.NewRepoCache(repo)
+ backend, err := cache.NewRepoCacheNoEvents(repo)
require.NoError(t, err)
// set author identity
login := "test-identity"
- author, err := backend.NewIdentity("test identity", "test@test.org")
+ author, err := backend.Identities().New("test identity", "test@test.org")
require.NoError(t, err)
author.SetMetadata(metaKeyGitlabLogin, login)
err = author.Commit()
@@ -220,7 +220,7 @@ func TestGitlabPushPull(t *testing.T) {
repoTwo := repository.CreateGoGitTestRepo(t, false)
// create a second backend
- backendTwo, err := cache.NewRepoCache(repoTwo)
+ backendTwo, err := cache.NewRepoCacheNoEvents(repoTwo)
require.NoError(t, err)
importer := &gitlabImporter{}
@@ -239,7 +239,7 @@ func TestGitlabPushPull(t *testing.T) {
require.NoError(t, result.Err)
}
- require.Len(t, backendTwo.AllBugsIds(), len(tests))
+ require.Len(t, backendTwo.Bugs().AllIds(), len(tests))
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
@@ -264,7 +264,7 @@ func TestGitlabPushPull(t *testing.T) {
require.True(t, ok)
// retrieve bug from backendTwo
- importedBug, err := backendTwo.ResolveBugCreateMetadata(metaKeyGitlabId, bugGitlabID)
+ importedBug, err := backendTwo.Bugs().ResolveBugCreateMetadata(metaKeyGitlabId, bugGitlabID)
require.NoError(t, err)
// verify bug have same number of original operations
diff --git a/bridge/gitlab/import.go b/bridge/gitlab/import.go
index c7909c8f..5947fb60 100644
--- a/bridge/gitlab/import.go
+++ b/bridge/gitlab/import.go
@@ -11,7 +11,6 @@ import (
"github.com/MichaelMure/git-bug/bridge/core"
"github.com/MichaelMure/git-bug/bridge/core/auth"
"github.com/MichaelMure/git-bug/cache"
- "github.com/MichaelMure/git-bug/entities/bug"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/util/text"
)
@@ -109,7 +108,7 @@ func (gi *gitlabImporter) ensureIssue(repo *cache.RepoCache, issue *gitlab.Issue
}
// resolve bug
- b, err := repo.ResolveBugMatcher(func(excerpt *cache.BugExcerpt) bool {
+ b, err := repo.Bugs().ResolveMatcher(func(excerpt *cache.BugExcerpt) bool {
return excerpt.CreateMetadata[core.MetaKeyOrigin] == target &&
excerpt.CreateMetadata[metaKeyGitlabId] == fmt.Sprintf("%d", issue.IID) &&
excerpt.CreateMetadata[metaKeyGitlabBaseUrl] == gi.conf[confKeyGitlabBaseUrl] &&
@@ -118,12 +117,12 @@ func (gi *gitlabImporter) ensureIssue(repo *cache.RepoCache, issue *gitlab.Issue
if err == nil {
return b, nil
}
- if err != bug.ErrBugNotExist {
+ if !entity.IsErrNotFound(err) {
return nil, err
}
// if bug was never imported, create bug
- b, _, err = repo.NewBugRaw(
+ b, _, err = repo.Bugs().NewRaw(
author,
issue.CreatedAt.Unix(),
text.CleanupOneLine(issue.Title),
@@ -338,7 +337,7 @@ func (gi *gitlabImporter) ensureIssueEvent(repo *cache.RepoCache, b *cache.BugCa
func (gi *gitlabImporter) ensurePerson(repo *cache.RepoCache, id int) (*cache.IdentityCache, error) {
// Look first in the cache
- i, err := repo.ResolveIdentityImmutableMetadata(metaKeyGitlabId, strconv.Itoa(id))
+ i, err := repo.Identities().ResolveIdentityImmutableMetadata(metaKeyGitlabId, strconv.Itoa(id))
if err == nil {
return i, nil
}
@@ -351,7 +350,7 @@ func (gi *gitlabImporter) ensurePerson(repo *cache.RepoCache, id int) (*cache.Id
return nil, err
}
- i, err = repo.NewIdentityRaw(
+ i, err = repo.Identities().NewRaw(
user.Name,
user.PublicEmail,
user.Username,
diff --git a/bridge/gitlab/import_test.go b/bridge/gitlab/import_test.go
index d98da4ef..bed93a80 100644
--- a/bridge/gitlab/import_test.go
+++ b/bridge/gitlab/import_test.go
@@ -33,7 +33,7 @@ func TestGitlabImport(t *testing.T) {
repo := repository.CreateGoGitTestRepo(t, false)
- backend, err := cache.NewRepoCache(repo)
+ backend, err := cache.NewRepoCacheNoEvents(repo)
require.NoError(t, err)
defer backend.Close()
@@ -126,11 +126,11 @@ func TestGitlabImport(t *testing.T) {
fmt.Printf("test repository imported in %f seconds\n", time.Since(start).Seconds())
- require.Len(t, backend.AllBugsIds(), len(tests))
+ require.Len(t, backend.Bugs().AllIds(), len(tests))
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- b, err := backend.ResolveBugCreateMetadata(metaKeyGitlabUrl, tt.url)
+ b, err := backend.Bugs().ResolveBugCreateMetadata(metaKeyGitlabUrl, tt.url)
require.NoError(t, err)
ops := b.Snapshot().Operations
diff --git a/bridge/jira/export.go b/bridge/jira/export.go
index 8587a55d..95f9e28c 100644
--- a/bridge/jira/export.go
+++ b/bridge/jira/export.go
@@ -14,7 +14,6 @@ import (
"github.com/MichaelMure/git-bug/bridge/core/auth"
"github.com/MichaelMure/git-bug/cache"
"github.com/MichaelMure/git-bug/entities/bug"
- "github.com/MichaelMure/git-bug/entities/identity"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/entity/dag"
)
@@ -102,8 +101,8 @@ func (je *jiraExporter) cacheAllClient(ctx context.Context, repo *cache.RepoCach
continue
}
- user, err := repo.ResolveIdentityImmutableMetadata(metaKeyJiraLogin, login)
- if err == identity.ErrIdentityNotExist {
+ user, err := repo.Identities().ResolveIdentityImmutableMetadata(metaKeyJiraLogin, login)
+ if entity.IsErrNotFound(err) {
continue
}
if err != nil {
@@ -146,10 +145,10 @@ func (je *jiraExporter) ExportAll(ctx context.Context, repo *cache.RepoCache, si
allIdentitiesIds = append(allIdentitiesIds, id)
}
- allBugsIds := repo.AllBugsIds()
+ allBugsIds := repo.Bugs().AllIds()
for _, id := range allBugsIds {
- b, err := repo.ResolveBug(id)
+ b, err := repo.Bugs().Resolve(id)
if err != nil {
out <- core.NewExportError(errors.Wrap(err, "can't load bug"), id)
return
diff --git a/bridge/jira/import.go b/bridge/jira/import.go
index ff9fbb7a..d8a5f8dd 100644
--- a/bridge/jira/import.go
+++ b/bridge/jira/import.go
@@ -184,7 +184,7 @@ func (ji *jiraImporter) ImportAll(ctx context.Context, repo *cache.RepoCache, si
// Create a bug.Person from a JIRA user
func (ji *jiraImporter) ensurePerson(repo *cache.RepoCache, user User) (*cache.IdentityCache, error) {
// Look first in the cache
- i, err := repo.ResolveIdentityImmutableMetadata(
+ i, err := repo.Identities().ResolveIdentityImmutableMetadata(
metaKeyJiraUser, string(user.Key))
if err == nil {
return i, nil
@@ -193,7 +193,7 @@ func (ji *jiraImporter) ensurePerson(repo *cache.RepoCache, user User) (*cache.I
return nil, err
}
- i, err = repo.NewIdentityRaw(
+ i, err = repo.Identities().NewRaw(
user.DisplayName,
user.EmailAddress,
user.Key,
@@ -219,7 +219,7 @@ func (ji *jiraImporter) ensureIssue(repo *cache.RepoCache, issue Issue) (*cache.
return nil, err
}
- b, err := repo.ResolveBugMatcher(func(excerpt *cache.BugExcerpt) bool {
+ b, err := repo.Bugs().ResolveMatcher(func(excerpt *cache.BugExcerpt) bool {
if _, ok := excerpt.CreateMetadata[metaKeyJiraBaseUrl]; ok &&
excerpt.CreateMetadata[metaKeyJiraBaseUrl] != ji.conf[confKeyBaseUrl] {
return false
@@ -229,12 +229,12 @@ func (ji *jiraImporter) ensureIssue(repo *cache.RepoCache, issue Issue) (*cache.
excerpt.CreateMetadata[metaKeyJiraId] == issue.ID &&
excerpt.CreateMetadata[metaKeyJiraProject] == ji.conf[confKeyProject]
})
- if err != nil && err != bug.ErrBugNotExist {
+ if err != nil && !entity.IsErrNotFound(err) {
return nil, err
}
- if err == bug.ErrBugNotExist {
- b, _, err = repo.NewBugRaw(
+ if entity.IsErrNotFound(err) {
+ b, _, err = repo.Bugs().NewRaw(
author,
issue.Fields.Created.Unix(),
text.CleanupOneLine(issue.Fields.Summary),
diff --git a/bridge/launchpad/import.go b/bridge/launchpad/import.go
index f81e3582..6a20217c 100644
--- a/bridge/launchpad/import.go
+++ b/bridge/launchpad/import.go
@@ -7,7 +7,6 @@ import (
"github.com/MichaelMure/git-bug/bridge/core"
"github.com/MichaelMure/git-bug/cache"
- "github.com/MichaelMure/git-bug/entities/bug"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/util/text"
)
@@ -23,7 +22,7 @@ func (li *launchpadImporter) Init(_ context.Context, repo *cache.RepoCache, conf
func (li *launchpadImporter) ensurePerson(repo *cache.RepoCache, owner LPPerson) (*cache.IdentityCache, error) {
// Look first in the cache
- i, err := repo.ResolveIdentityImmutableMetadata(metaKeyLaunchpadLogin, owner.Login)
+ i, err := repo.Identities().ResolveIdentityImmutableMetadata(metaKeyLaunchpadLogin, owner.Login)
if err == nil {
return i, nil
}
@@ -31,7 +30,7 @@ func (li *launchpadImporter) ensurePerson(repo *cache.RepoCache, owner LPPerson)
return nil, err
}
- return repo.NewIdentityRaw(
+ return repo.Identities().NewRaw(
owner.Name,
"",
owner.Login,
@@ -64,11 +63,11 @@ func (li *launchpadImporter) ImportAll(ctx context.Context, repo *cache.RepoCach
return
default:
lpBugID := fmt.Sprintf("%d", lpBug.ID)
- b, err := repo.ResolveBugMatcher(func(excerpt *cache.BugExcerpt) bool {
+ b, err := repo.Bugs().ResolveMatcher(func(excerpt *cache.BugExcerpt) bool {
return excerpt.CreateMetadata[core.MetaKeyOrigin] == target &&
excerpt.CreateMetadata[metaKeyLaunchpadID] == lpBugID
})
- if err != nil && err != bug.ErrBugNotExist {
+ if err != nil && !entity.IsErrNotFound(err) {
out <- core.NewImportError(err, entity.Id(lpBugID))
return
}
@@ -79,9 +78,9 @@ func (li *launchpadImporter) ImportAll(ctx context.Context, repo *cache.RepoCach
return
}
- if err == bug.ErrBugNotExist {
+ if entity.IsErrNotFound(err) {
createdAt, _ := time.Parse(time.RFC3339, lpBug.CreatedAt)
- b, _, err = repo.NewBugRaw(
+ b, _, err = repo.Bugs().NewRaw(
owner,
createdAt.Unix(),
text.CleanupOneLine(lpBug.Title),
diff --git a/cache/bug_cache.go b/cache/bug_cache.go
index 65e2068f..3466f186 100644
--- a/cache/bug_cache.go
+++ b/cache/bug_cache.go
@@ -2,10 +2,10 @@ package cache
import (
"fmt"
- "sync"
"time"
"github.com/MichaelMure/git-bug/entities/bug"
+ "github.com/MichaelMure/git-bug/entities/identity"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/entity/dag"
"github.com/MichaelMure/git-bug/repository"
@@ -19,63 +19,26 @@ var ErrNoMatchingOp = fmt.Errorf("no matching operation found")
// 2. Maintain an up-to-date Snapshot available.
// 3. Deal with concurrency.
type BugCache struct {
- repoCache *RepoCache
- mu sync.RWMutex
- bug *bug.WithSnapshot
+ CachedEntityBase[*bug.Snapshot, bug.Operation]
}
-func NewBugCache(repoCache *RepoCache, b *bug.Bug) *BugCache {
+func NewBugCache(b *bug.Bug, repo repository.ClockedRepo, getUserIdentity getUserIdentityFunc, entityUpdated func(id entity.Id) error) *BugCache {
return &BugCache{
- repoCache: repoCache,
- bug: &bug.WithSnapshot{Bug: b},
+ CachedEntityBase: CachedEntityBase[*bug.Snapshot, bug.Operation]{
+ repo: repo,
+ entityUpdated: entityUpdated,
+ getUserIdentity: getUserIdentity,
+ entity: &withSnapshot[*bug.Snapshot, bug.Operation]{Interface: b},
+ },
}
}
-func (c *BugCache) Snapshot() *bug.Snapshot {
- c.mu.RLock()
- defer c.mu.RUnlock()
- return c.bug.Compile()
-}
-
-func (c *BugCache) Id() entity.Id {
- return c.bug.Id()
-}
-
-func (c *BugCache) notifyUpdated() error {
- return c.repoCache.bugUpdated(c.bug.Id())
-}
-
-// ResolveOperationWithMetadata will find an operation that has the matching metadata
-func (c *BugCache) ResolveOperationWithMetadata(key string, value string) (entity.Id, error) {
- c.mu.RLock()
- defer c.mu.RUnlock()
- // preallocate but empty
- matching := make([]entity.Id, 0, 5)
-
- for _, op := range c.bug.Operations() {
- opValue, ok := op.GetMetadata(key)
- if ok && value == opValue {
- matching = append(matching, op.Id())
- }
- }
-
- if len(matching) == 0 {
- return "", ErrNoMatchingOp
- }
-
- if len(matching) > 1 {
- return "", bug.NewErrMultipleMatchOp(matching)
- }
-
- return matching[0], nil
-}
-
func (c *BugCache) AddComment(message string) (entity.CombinedId, *bug.AddCommentOperation, error) {
return c.AddCommentWithFiles(message, nil)
}
func (c *BugCache) AddCommentWithFiles(message string, files []repository.Hash) (entity.CombinedId, *bug.AddCommentOperation, error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return entity.UnsetCombinedId, nil, err
}
@@ -83,9 +46,9 @@ func (c *BugCache) AddCommentWithFiles(message string, files []repository.Hash)
return c.AddCommentRaw(author, time.Now().Unix(), message, files, nil)
}
-func (c *BugCache) AddCommentRaw(author *IdentityCache, unixTime int64, message string, files []repository.Hash, metadata map[string]string) (entity.CombinedId, *bug.AddCommentOperation, error) {
+func (c *BugCache) AddCommentRaw(author identity.Interface, unixTime int64, message string, files []repository.Hash, metadata map[string]string) (entity.CombinedId, *bug.AddCommentOperation, error) {
c.mu.Lock()
- commentId, op, err := bug.AddComment(c.bug, author, unixTime, message, files, metadata)
+ commentId, op, err := bug.AddComment(c.entity, author, unixTime, message, files, metadata)
c.mu.Unlock()
if err != nil {
return entity.UnsetCombinedId, nil, err
@@ -94,7 +57,7 @@ func (c *BugCache) AddCommentRaw(author *IdentityCache, unixTime int64, message
}
func (c *BugCache) ChangeLabels(added []string, removed []string) ([]bug.LabelChangeResult, *bug.LabelChangeOperation, error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return nil, nil, err
}
@@ -102,9 +65,9 @@ func (c *BugCache) ChangeLabels(added []string, removed []string) ([]bug.LabelCh
return c.ChangeLabelsRaw(author, time.Now().Unix(), added, removed, nil)
}
-func (c *BugCache) ChangeLabelsRaw(author *IdentityCache, unixTime int64, added []string, removed []string, metadata map[string]string) ([]bug.LabelChangeResult, *bug.LabelChangeOperation, error) {
+func (c *BugCache) ChangeLabelsRaw(author identity.Interface, unixTime int64, added []string, removed []string, metadata map[string]string) ([]bug.LabelChangeResult, *bug.LabelChangeOperation, error) {
c.mu.Lock()
- changes, op, err := bug.ChangeLabels(c.bug, author.Identity, unixTime, added, removed, metadata)
+ changes, op, err := bug.ChangeLabels(c.entity, author, unixTime, added, removed, metadata)
c.mu.Unlock()
if err != nil {
return changes, nil, err
@@ -113,7 +76,7 @@ func (c *BugCache) ChangeLabelsRaw(author *IdentityCache, unixTime int64, added
}
func (c *BugCache) ForceChangeLabels(added []string, removed []string) (*bug.LabelChangeOperation, error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return nil, err
}
@@ -121,9 +84,9 @@ func (c *BugCache) ForceChangeLabels(added []string, removed []string) (*bug.Lab
return c.ForceChangeLabelsRaw(author, time.Now().Unix(), added, removed, nil)
}
-func (c *BugCache) ForceChangeLabelsRaw(author *IdentityCache, unixTime int64, added []string, removed []string, metadata map[string]string) (*bug.LabelChangeOperation, error) {
+func (c *BugCache) ForceChangeLabelsRaw(author identity.Interface, unixTime int64, added []string, removed []string, metadata map[string]string) (*bug.LabelChangeOperation, error) {
c.mu.Lock()
- op, err := bug.ForceChangeLabels(c.bug, author.Identity, unixTime, added, removed, metadata)
+ op, err := bug.ForceChangeLabels(c.entity, author, unixTime, added, removed, metadata)
c.mu.Unlock()
if err != nil {
return nil, err
@@ -132,7 +95,7 @@ func (c *BugCache) ForceChangeLabelsRaw(author *IdentityCache, unixTime int64, a
}
func (c *BugCache) Open() (*bug.SetStatusOperation, error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return nil, err
}
@@ -140,9 +103,9 @@ func (c *BugCache) Open() (*bug.SetStatusOperation, error) {
return c.OpenRaw(author, time.Now().Unix(), nil)
}
-func (c *BugCache) OpenRaw(author *IdentityCache, unixTime int64, metadata map[string]string) (*bug.SetStatusOperation, error) {
+func (c *BugCache) OpenRaw(author identity.Interface, unixTime int64, metadata map[string]string) (*bug.SetStatusOperation, error) {
c.mu.Lock()
- op, err := bug.Open(c.bug, author.Identity, unixTime, metadata)
+ op, err := bug.Open(c.entity, author, unixTime, metadata)
c.mu.Unlock()
if err != nil {
return nil, err
@@ -151,7 +114,7 @@ func (c *BugCache) OpenRaw(author *IdentityCache, unixTime int64, metadata map[s
}
func (c *BugCache) Close() (*bug.SetStatusOperation, error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return nil, err
}
@@ -159,9 +122,9 @@ func (c *BugCache) Close() (*bug.SetStatusOperation, error) {
return c.CloseRaw(author, time.Now().Unix(), nil)
}
-func (c *BugCache) CloseRaw(author *IdentityCache, unixTime int64, metadata map[string]string) (*bug.SetStatusOperation, error) {
+func (c *BugCache) CloseRaw(author identity.Interface, unixTime int64, metadata map[string]string) (*bug.SetStatusOperation, error) {
c.mu.Lock()
- op, err := bug.Close(c.bug, author.Identity, unixTime, metadata)
+ op, err := bug.Close(c.entity, author, unixTime, metadata)
c.mu.Unlock()
if err != nil {
return nil, err
@@ -170,7 +133,7 @@ func (c *BugCache) CloseRaw(author *IdentityCache, unixTime int64, metadata map[
}
func (c *BugCache) SetTitle(title string) (*bug.SetTitleOperation, error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return nil, err
}
@@ -178,9 +141,9 @@ func (c *BugCache) SetTitle(title string) (*bug.SetTitleOperation, error) {
return c.SetTitleRaw(author, time.Now().Unix(), title, nil)
}
-func (c *BugCache) SetTitleRaw(author *IdentityCache, unixTime int64, title string, metadata map[string]string) (*bug.SetTitleOperation, error) {
+func (c *BugCache) SetTitleRaw(author identity.Interface, unixTime int64, title string, metadata map[string]string) (*bug.SetTitleOperation, error) {
c.mu.Lock()
- op, err := bug.SetTitle(c.bug, author.Identity, unixTime, title, metadata)
+ op, err := bug.SetTitle(c.entity, author, unixTime, title, metadata)
c.mu.Unlock()
if err != nil {
return nil, err
@@ -190,7 +153,7 @@ func (c *BugCache) SetTitleRaw(author *IdentityCache, unixTime int64, title stri
// EditCreateComment is a convenience function to edit the body of a bug (the first comment)
func (c *BugCache) EditCreateComment(body string) (entity.CombinedId, *bug.EditCommentOperation, error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return entity.UnsetCombinedId, nil, err
}
@@ -199,9 +162,9 @@ func (c *BugCache) EditCreateComment(body string) (entity.CombinedId, *bug.EditC
}
// EditCreateCommentRaw is a convenience function to edit the body of a bug (the first comment)
-func (c *BugCache) EditCreateCommentRaw(author *IdentityCache, unixTime int64, body string, metadata map[string]string) (entity.CombinedId, *bug.EditCommentOperation, error) {
+func (c *BugCache) EditCreateCommentRaw(author identity.Interface, unixTime int64, body string, metadata map[string]string) (entity.CombinedId, *bug.EditCommentOperation, error) {
c.mu.Lock()
- commentId, op, err := bug.EditCreateComment(c.bug, author.Identity, unixTime, body, nil, metadata)
+ commentId, op, err := bug.EditCreateComment(c.entity, author, unixTime, body, nil, metadata)
c.mu.Unlock()
if err != nil {
return entity.UnsetCombinedId, nil, err
@@ -210,7 +173,7 @@ func (c *BugCache) EditCreateCommentRaw(author *IdentityCache, unixTime int64, b
}
func (c *BugCache) EditComment(target entity.CombinedId, message string) (*bug.EditCommentOperation, error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return nil, err
}
@@ -218,14 +181,14 @@ func (c *BugCache) EditComment(target entity.CombinedId, message string) (*bug.E
return c.EditCommentRaw(author, time.Now().Unix(), target, message, nil)
}
-func (c *BugCache) EditCommentRaw(author *IdentityCache, unixTime int64, target entity.CombinedId, message string, metadata map[string]string) (*bug.EditCommentOperation, error) {
+func (c *BugCache) EditCommentRaw(author identity.Interface, unixTime int64, target entity.CombinedId, message string, metadata map[string]string) (*bug.EditCommentOperation, error) {
comment, err := c.Snapshot().SearchComment(target)
if err != nil {
return nil, err
}
c.mu.Lock()
- commentId, op, err := bug.EditComment(c.bug, author.Identity, unixTime, comment.TargetId(), message, nil, metadata)
+ commentId, op, err := bug.EditComment(c.entity, author, unixTime, comment.TargetId(), message, nil, metadata)
c.mu.Unlock()
if err != nil {
return nil, err
@@ -237,7 +200,7 @@ func (c *BugCache) EditCommentRaw(author *IdentityCache, unixTime int64, target
}
func (c *BugCache) SetMetadata(target entity.Id, newMetadata map[string]string) (*dag.SetMetadataOperation[*bug.Snapshot], error) {
- author, err := c.repoCache.GetUserIdentity()
+ author, err := c.getUserIdentity()
if err != nil {
return nil, err
}
@@ -245,40 +208,12 @@ func (c *BugCache) SetMetadata(target entity.Id, newMetadata map[string]string)
return c.SetMetadataRaw(author, time.Now().Unix(), target, newMetadata)
}
-func (c *BugCache) SetMetadataRaw(author *IdentityCache, unixTime int64, target entity.Id, newMetadata map[string]string) (*dag.SetMetadataOperation[*bug.Snapshot], error) {
+func (c *BugCache) SetMetadataRaw(author identity.Interface, unixTime int64, target entity.Id, newMetadata map[string]string) (*dag.SetMetadataOperation[*bug.Snapshot], error) {
c.mu.Lock()
- op, err := bug.SetMetadata(c.bug, author.Identity, unixTime, target, newMetadata)
+ op, err := bug.SetMetadata(c.entity, author, unixTime, target, newMetadata)
c.mu.Unlock()
if err != nil {
return nil, err
}
return op, c.notifyUpdated()
}
-
-func (c *BugCache) Commit() error {
- c.mu.Lock()
- err := c.bug.Commit(c.repoCache.repo)
- if err != nil {
- c.mu.Unlock()
- return err
- }
- c.mu.Unlock()
- return c.notifyUpdated()
-}
-
-func (c *BugCache) CommitAsNeeded() error {
- c.mu.Lock()
- err := c.bug.CommitAsNeeded(c.repoCache.repo)
- if err != nil {
- c.mu.Unlock()
- return err
- }
- c.mu.Unlock()
- return c.notifyUpdated()
-}
-
-func (c *BugCache) NeedCommit() bool {
- c.mu.RLock()
- defer c.mu.RUnlock()
- return c.bug.NeedCommit()
-}
diff --git a/cache/bug_excerpt.go b/cache/bug_excerpt.go
index 7e3bcad4..26b7ec74 100644
--- a/cache/bug_excerpt.go
+++ b/cache/bug_excerpt.go
@@ -2,12 +2,10 @@ package cache
import (
"encoding/gob"
- "fmt"
"time"
"github.com/MichaelMure/git-bug/entities/bug"
"github.com/MichaelMure/git-bug/entities/common"
- "github.com/MichaelMure/git-bug/entities/identity"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/util/lamport"
)
@@ -17,10 +15,12 @@ func init() {
gob.Register(BugExcerpt{})
}
+var _ Excerpt = &BugExcerpt{}
+
// BugExcerpt hold a subset of the bug values to be able to sort and filter bugs
// efficiently without having to read and compile each raw bugs.
type BugExcerpt struct {
- Id entity.Id
+ id entity.Id
CreateLamportTime lamport.Time
EditLamportTime lamport.Time
@@ -38,26 +38,8 @@ type BugExcerpt struct {
CreateMetadata map[string]string
}
-// identity.Bare data are directly embedded in the bug excerpt
-type LegacyAuthorExcerpt struct {
- Name string
- Login string
-}
-
-func (l LegacyAuthorExcerpt) DisplayName() string {
- switch {
- case l.Name == "" && l.Login != "":
- return l.Login
- case l.Name != "" && l.Login == "":
- return l.Name
- case l.Name != "" && l.Login != "":
- return fmt.Sprintf("%s (%s)", l.Name, l.Login)
- }
-
- panic("invalid person data")
-}
-
-func NewBugExcerpt(b bug.Interface, snap *bug.Snapshot) *BugExcerpt {
+func NewBugExcerpt(b *BugCache) *BugExcerpt {
+ snap := b.Snapshot()
participantsIds := make([]entity.Id, 0, len(snap.Participants))
for _, participant := range snap.Participants {
participantsIds = append(participantsIds, participant.Id())
@@ -69,11 +51,12 @@ func NewBugExcerpt(b bug.Interface, snap *bug.Snapshot) *BugExcerpt {
}
e := &BugExcerpt{
- Id: b.Id(),
+ id: b.Id(),
CreateLamportTime: b.CreateLamportTime(),
EditLamportTime: b.EditLamportTime(),
CreateUnixTime: b.FirstOp().Time().Unix(),
EditUnixTime: snap.EditTime().Unix(),
+ AuthorId: snap.Author.Id(),
Status: snap.Status,
Labels: snap.Labels,
Actors: actorsIds,
@@ -83,16 +66,17 @@ func NewBugExcerpt(b bug.Interface, snap *bug.Snapshot) *BugExcerpt {
CreateMetadata: b.FirstOp().AllMetadata(),
}
- switch snap.Author.(type) {
- case *identity.Identity, *identity.IdentityStub, *IdentityCache:
- e.AuthorId = snap.Author.Id()
- default:
- panic("unhandled identity type")
- }
-
return e
}
+func (b *BugExcerpt) setId(id entity.Id) {
+ b.id = id
+}
+
+func (b *BugExcerpt) Id() entity.Id {
+ return b.id
+}
+
func (b *BugExcerpt) CreateTime() time.Time {
return time.Unix(b.CreateUnixTime, 0)
}
@@ -112,7 +96,7 @@ func (b BugsById) Len() int {
}
func (b BugsById) Less(i, j int) bool {
- return b[i].Id < b[j].Id
+ return b[i].id < b[j].id
}
func (b BugsById) Swap(i, j int) {
diff --git a/cache/bug_subcache.go b/cache/bug_subcache.go
new file mode 100644
index 00000000..920fe1dc
--- /dev/null
+++ b/cache/bug_subcache.go
@@ -0,0 +1,254 @@
+package cache
+
+import (
+ "errors"
+ "sort"
+ "time"
+
+ "github.com/MichaelMure/git-bug/entities/bug"
+ "github.com/MichaelMure/git-bug/entities/identity"
+ "github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/query"
+ "github.com/MichaelMure/git-bug/repository"
+)
+
+type RepoCacheBug struct {
+ *SubCache[*bug.Bug, *BugExcerpt, *BugCache]
+}
+
+func NewRepoCacheBug(repo repository.ClockedRepo,
+ resolvers func() entity.Resolvers,
+ getUserIdentity getUserIdentityFunc) *RepoCacheBug {
+
+ makeCached := func(b *bug.Bug, entityUpdated func(id entity.Id) error) *BugCache {
+ return NewBugCache(b, repo, getUserIdentity, entityUpdated)
+ }
+
+ makeIndexData := func(b *BugCache) []string {
+ snap := b.Snapshot()
+ var res []string
+ for _, comment := range snap.Comments {
+ res = append(res, comment.Message)
+ }
+ res = append(res, snap.Title)
+ return res
+ }
+
+ actions := Actions[*bug.Bug]{
+ ReadWithResolver: bug.ReadWithResolver,
+ ReadAllWithResolver: bug.ReadAllWithResolver,
+ Remove: bug.Remove,
+ MergeAll: bug.MergeAll,
+ }
+
+ sc := NewSubCache[*bug.Bug, *BugExcerpt, *BugCache](
+ repo, resolvers, getUserIdentity,
+ makeCached, NewBugExcerpt, makeIndexData, actions,
+ bug.Typename, bug.Namespace,
+ formatVersion, defaultMaxLoadedBugs,
+ )
+
+ return &RepoCacheBug{SubCache: sc}
+}
+
+// ResolveBugCreateMetadata retrieves a bug that has the exact given metadata on
+// its Create operation, that is, the first operation. It fails if multiple bugs
+// match.
+func (c *RepoCacheBug) ResolveBugCreateMetadata(key string, value string) (*BugCache, error) {
+ return c.ResolveMatcher(func(excerpt *BugExcerpt) bool {
+ return excerpt.CreateMetadata[key] == value
+ })
+}
+
+// ResolveComment searches for a Bug/Comment combination matching the merged
+// bug/comment Id prefix. It returns the Bug containing the Comment and the
+// Comment's Id.
+func (c *RepoCacheBug) ResolveComment(prefix string) (*BugCache, entity.CombinedId, error) {
+ bugPrefix, _ := entity.SeparateIds(prefix)
+ bugCandidate := make([]entity.Id, 0, 5)
+
+ // build a list of possible matching bugs
+ c.mu.RLock()
+ for _, excerpt := range c.excerpts {
+ if excerpt.Id().HasPrefix(bugPrefix) {
+ bugCandidate = append(bugCandidate, excerpt.Id())
+ }
+ }
+ c.mu.RUnlock()
+
+ matchingBugIds := make([]entity.Id, 0, 5)
+ matchingCommentId := entity.UnsetCombinedId
+ var matchingBug *BugCache
+
+ // search for matching comments
+ // searching every bug candidate allows for some collision with the bug prefix only,
+ // before being refined with the full comment prefix
+ for _, bugId := range bugCandidate {
+ b, err := c.Resolve(bugId)
+ if err != nil {
+ return nil, entity.UnsetCombinedId, err
+ }
+
+ for _, comment := range b.Snapshot().Comments {
+ if comment.CombinedId().HasPrefix(prefix) {
+ matchingBugIds = append(matchingBugIds, bugId)
+ matchingBug = b
+ matchingCommentId = comment.CombinedId()
+ }
+ }
+ }
+
+ if len(matchingBugIds) > 1 {
+ return nil, entity.UnsetCombinedId, entity.NewErrMultipleMatch("bug/comment", matchingBugIds)
+ } else if len(matchingBugIds) == 0 {
+ return nil, entity.UnsetCombinedId, errors.New("comment doesn't exist")
+ }
+
+ return matchingBug, matchingCommentId, nil
+}
+
+// Query returns the ids of all Bugs matching the given Query
+func (c *RepoCacheBug) Query(q *query.Query) ([]entity.Id, error) {
+ c.mu.RLock()
+ defer c.mu.RUnlock()
+
+ if q == nil {
+ return c.AllIds(), nil
+ }
+
+ matcher := compileMatcher(q.Filters)
+
+ var filtered []*BugExcerpt
+ var foundBySearch map[entity.Id]*BugExcerpt
+
+ if q.Search != nil {
+ foundBySearch = map[entity.Id]*BugExcerpt{}
+
+ index, err := c.repo.GetIndex("bugs")
+ if err != nil {
+ return nil, err
+ }
+
+ res, err := index.Search(q.Search)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, hit := range res {
+ id := entity.Id(hit)
+ foundBySearch[id] = c.excerpts[id]
+ }
+ } else {
+ foundBySearch = c.excerpts
+ }
+
+ for _, excerpt := range foundBySearch {
+ if matcher.Match(excerpt, c.resolvers()) {
+ filtered = append(filtered, excerpt)
+ }
+ }
+
+ var sorter sort.Interface
+
+ switch q.OrderBy {
+ case query.OrderById:
+ sorter = BugsById(filtered)
+ case query.OrderByCreation:
+ sorter = BugsByCreationTime(filtered)
+ case query.OrderByEdit:
+ sorter = BugsByEditTime(filtered)
+ default:
+ return nil, errors.New("missing sort type")
+ }
+
+ switch q.OrderDirection {
+ case query.OrderAscending:
+ // Nothing to do
+ case query.OrderDescending:
+ sorter = sort.Reverse(sorter)
+ default:
+ return nil, errors.New("missing sort direction")
+ }
+
+ sort.Sort(sorter)
+
+ result := make([]entity.Id, len(filtered))
+
+ for i, val := range filtered {
+ result[i] = val.Id()
+ }
+
+ return result, nil
+}
+
+// ValidLabels lists the valid labels
+//
+// Note: in the future, a proper label policy could be implemented where valid
+// labels are defined in a configuration file. Until then, the default behavior
+// is to return the list of labels already in use.
+func (c *RepoCacheBug) ValidLabels() []bug.Label {
+ c.mu.RLock()
+ defer c.mu.RUnlock()
+
+ set := map[bug.Label]interface{}{}
+
+ for _, excerpt := range c.excerpts {
+ for _, l := range excerpt.Labels {
+ set[l] = nil
+ }
+ }
+
+ result := make([]bug.Label, len(set))
+
+ i := 0
+ for l := range set {
+ result[i] = l
+ i++
+ }
+
+ // Sort
+ sort.Slice(result, func(i, j int) bool {
+ return string(result[i]) < string(result[j])
+ })
+
+ return result
+}
+
+// New creates a new bug.
+// The new bug is written in the repository (commit).
+func (c *RepoCacheBug) New(title string, message string) (*BugCache, *bug.CreateOperation, error) {
+ return c.NewWithFiles(title, message, nil)
+}
+
+// NewWithFiles creates a new bug with attached files for the message.
+// The new bug is written in the repository (commit).
+func (c *RepoCacheBug) NewWithFiles(title string, message string, files []repository.Hash) (*BugCache, *bug.CreateOperation, error) {
+ author, err := c.getUserIdentity()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return c.NewRaw(author, time.Now().Unix(), title, message, files, nil)
+}
+
+// NewRaw creates a new bug with attached files for the message, as
+// well as metadata for the Create operation.
+// The new bug is written in the repository (commit).
+func (c *RepoCacheBug) NewRaw(author identity.Interface, unixTime int64, title string, message string, files []repository.Hash, metadata map[string]string) (*BugCache, *bug.CreateOperation, error) {
+ b, op, err := bug.Create(author, unixTime, title, message, files, metadata)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ err = b.Commit(c.repo)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ cached, err := c.add(b)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return cached, op, nil
+}
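// Illustrative sketch (editorial addition, not part of this diff): the typical
// write path through the bug sub-cache. New commits the bug and registers it in
// the cache, as shown in NewRaw above; only the printed message is invented.
package example

import (
	"fmt"

	"github.com/MichaelMure/git-bug/cache"
)

func fileBug(c *cache.RepoCache, title, message string) error {
	b, _, err := c.Bugs().New(title, message)
	if err != nil {
		return err
	}
	fmt.Println("created bug", b.Id())
	return nil
}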
diff --git a/cache/cached.go b/cache/cached.go
new file mode 100644
index 00000000..9f9e170d
--- /dev/null
+++ b/cache/cached.go
@@ -0,0 +1,111 @@
+package cache
+
+import (
+ "sync"
+
+ "github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/entity/dag"
+ "github.com/MichaelMure/git-bug/repository"
+ "github.com/MichaelMure/git-bug/util/lamport"
+)
+
+var _ CacheEntity = &CachedEntityBase[dag.Snapshot, dag.Operation]{}
+
+// CachedEntityBase provides the base functions of an entity managed by the cache.
+type CachedEntityBase[SnapT dag.Snapshot, OpT dag.Operation] struct {
+ repo repository.ClockedRepo
+ entityUpdated func(id entity.Id) error
+ getUserIdentity getUserIdentityFunc
+
+ mu sync.RWMutex
+ entity dag.Interface[SnapT, OpT]
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) Id() entity.Id {
+ return e.entity.Id()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) Snapshot() SnapT {
+ e.mu.RLock()
+ defer e.mu.RUnlock()
+ return e.entity.Compile()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) notifyUpdated() error {
+ return e.entityUpdated(e.entity.Id())
+}
+
+// ResolveOperationWithMetadata will find an operation that has the matching metadata
+func (e *CachedEntityBase[SnapT, OpT]) ResolveOperationWithMetadata(key string, value string) (entity.Id, error) {
+ e.mu.RLock()
+ defer e.mu.RUnlock()
+ // preallocate but empty
+ matching := make([]entity.Id, 0, 5)
+
+ for _, op := range e.entity.Operations() {
+ opValue, ok := op.GetMetadata(key)
+ if ok && value == opValue {
+ matching = append(matching, op.Id())
+ }
+ }
+
+ if len(matching) == 0 {
+ return "", ErrNoMatchingOp
+ }
+
+ if len(matching) > 1 {
+ return "", entity.NewErrMultipleMatch("operation", matching)
+ }
+
+ return matching[0], nil
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) Validate() error {
+ e.mu.RLock()
+ defer e.mu.RUnlock()
+ return e.entity.Validate()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) Commit() error {
+ e.mu.Lock()
+ err := e.entity.Commit(e.repo)
+ if err != nil {
+ e.mu.Unlock()
+ return err
+ }
+ e.mu.Unlock()
+ return e.notifyUpdated()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) CommitAsNeeded() error {
+ e.mu.Lock()
+ err := e.entity.CommitAsNeeded(e.repo)
+ if err != nil {
+ e.mu.Unlock()
+ return err
+ }
+ e.mu.Unlock()
+ return e.notifyUpdated()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) NeedCommit() bool {
+ e.mu.RLock()
+ defer e.mu.RUnlock()
+ return e.entity.NeedCommit()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) Lock() {
+ e.mu.Lock()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) CreateLamportTime() lamport.Time {
+ return e.entity.CreateLamportTime()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) EditLamportTime() lamport.Time {
+ return e.entity.EditLamportTime()
+}
+
+func (e *CachedEntityBase[SnapT, OpT]) FirstOp() OpT {
+ return e.entity.FirstOp()
+}
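// Illustrative sketch (editorial addition, not part of this diff): a hypothetical
// generic helper built only on the methods defined above, showing that the same
// surface is available for any snapshot/operation pair. commitIfNeeded is not
// part of the cache package.
package example

import (
	"github.com/MichaelMure/git-bug/cache"
	"github.com/MichaelMure/git-bug/entity/dag"
)

// commitIfNeeded commits the cached entity only if it has pending operations.
func commitIfNeeded[SnapT dag.Snapshot, OpT dag.Operation](e *cache.CachedEntityBase[SnapT, OpT]) error {
	if !e.NeedCommit() {
		return nil
	}
	return e.Commit()
}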
diff --git a/cache/filter.go b/cache/filter.go
index 299e7c83..5a15e402 100644
--- a/cache/filter.go
+++ b/cache/filter.go
@@ -8,28 +8,22 @@ import (
"github.com/MichaelMure/git-bug/query"
)
-// resolver has the resolving functions needed by filters.
-// This exist mainly to go through the functions of the cache with proper locking.
-type resolver interface {
- ResolveIdentityExcerpt(id entity.Id) (*IdentityExcerpt, error)
-}
-
// Filter is a predicate that match a subset of bugs
-type Filter func(excerpt *BugExcerpt, resolver resolver) bool
+type Filter func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool
// StatusFilter return a Filter that match a bug status
func StatusFilter(status common.Status) Filter {
- return func(excerpt *BugExcerpt, resolver resolver) bool {
+ return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
return excerpt.Status == status
}
}
// AuthorFilter return a Filter that match a bug author
func AuthorFilter(query string) Filter {
- return func(excerpt *BugExcerpt, resolver resolver) bool {
+ return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
query = strings.ToLower(query)
- author, err := resolver.ResolveIdentityExcerpt(excerpt.AuthorId)
+ author, err := entity.Resolve[*IdentityExcerpt](resolvers, excerpt.AuthorId)
if err != nil {
panic(err)
}
@@ -40,7 +34,7 @@ func AuthorFilter(query string) Filter {
// MetadataFilter return a Filter that match a bug metadata at creation time
func MetadataFilter(pair query.StringPair) Filter {
- return func(excerpt *BugExcerpt, resolver resolver) bool {
+ return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
if value, ok := excerpt.CreateMetadata[pair.Key]; ok {
return value == pair.Value
}
@@ -50,7 +44,7 @@ func MetadataFilter(pair query.StringPair) Filter {
// LabelFilter return a Filter that match a label
func LabelFilter(label string) Filter {
- return func(excerpt *BugExcerpt, resolver resolver) bool {
+ return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
for _, l := range excerpt.Labels {
if string(l) == label {
return true
@@ -62,11 +56,11 @@ func LabelFilter(label string) Filter {
// ActorFilter return a Filter that match a bug actor
func ActorFilter(query string) Filter {
- return func(excerpt *BugExcerpt, resolver resolver) bool {
+ return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
query = strings.ToLower(query)
for _, id := range excerpt.Actors {
- identityExcerpt, err := resolver.ResolveIdentityExcerpt(id)
+ identityExcerpt, err := entity.Resolve[*IdentityExcerpt](resolvers, id)
if err != nil {
panic(err)
}
@@ -81,11 +75,11 @@ func ActorFilter(query string) Filter {
// ParticipantFilter return a Filter that match a bug participant
func ParticipantFilter(query string) Filter {
- return func(excerpt *BugExcerpt, resolver resolver) bool {
+ return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
query = strings.ToLower(query)
for _, id := range excerpt.Participants {
- identityExcerpt, err := resolver.ResolveIdentityExcerpt(id)
+ identityExcerpt, err := entity.Resolve[*IdentityExcerpt](resolvers, id)
if err != nil {
panic(err)
}
@@ -100,7 +94,7 @@ func ParticipantFilter(query string) Filter {
// TitleFilter return a Filter that match if the title contains the given query
func TitleFilter(query string) Filter {
- return func(excerpt *BugExcerpt, resolver resolver) bool {
+ return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
return strings.Contains(
strings.ToLower(excerpt.Title),
strings.ToLower(query),
@@ -110,7 +104,7 @@ func TitleFilter(query string) Filter {
// NoLabelFilter return a Filter that match the absence of labels
func NoLabelFilter() Filter {
- return func(excerpt *BugExcerpt, resolver resolver) bool {
+ return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
return len(excerpt.Labels) == 0
}
}
@@ -161,36 +155,36 @@ func compileMatcher(filters query.Filters) *Matcher {
}
// Match check if a bug match the set of filters
-func (f *Matcher) Match(excerpt *BugExcerpt, resolver resolver) bool {
- if match := f.orMatch(f.Status, excerpt, resolver); !match {
+func (f *Matcher) Match(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
+ if match := f.orMatch(f.Status, excerpt, resolvers); !match {
return false
}
- if match := f.orMatch(f.Author, excerpt, resolver); !match {
+ if match := f.orMatch(f.Author, excerpt, resolvers); !match {
return false
}
- if match := f.orMatch(f.Metadata, excerpt, resolver); !match {
+ if match := f.orMatch(f.Metadata, excerpt, resolvers); !match {
return false
}
- if match := f.orMatch(f.Participant, excerpt, resolver); !match {
+ if match := f.orMatch(f.Participant, excerpt, resolvers); !match {
return false
}
- if match := f.orMatch(f.Actor, excerpt, resolver); !match {
+ if match := f.orMatch(f.Actor, excerpt, resolvers); !match {
return false
}
- if match := f.andMatch(f.Label, excerpt, resolver); !match {
+ if match := f.andMatch(f.Label, excerpt, resolvers); !match {
return false
}
- if match := f.andMatch(f.NoFilters, excerpt, resolver); !match {
+ if match := f.andMatch(f.NoFilters, excerpt, resolvers); !match {
return false
}
- if match := f.andMatch(f.Title, excerpt, resolver); !match {
+ if match := f.andMatch(f.Title, excerpt, resolvers); !match {
return false
}
@@ -198,28 +192,28 @@ func (f *Matcher) Match(excerpt *BugExcerpt, resolver resolver) bool {
}
// Check if any of the filters provided match the bug
-func (*Matcher) orMatch(filters []Filter, excerpt *BugExcerpt, resolver resolver) bool {
+func (*Matcher) orMatch(filters []Filter, excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
if len(filters) == 0 {
return true
}
match := false
for _, f := range filters {
- match = match || f(excerpt, resolver)
+ match = match || f(excerpt, resolvers)
}
return match
}
-// Check if all of the filters provided match the bug
-func (*Matcher) andMatch(filters []Filter, excerpt *BugExcerpt, resolver resolver) bool {
+// Check if all the filters provided match the bug
+func (*Matcher) andMatch(filters []Filter, excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
if len(filters) == 0 {
return true
}
match := true
for _, f := range filters {
- match = match && f(excerpt, resolver)
+ match = match && f(excerpt, resolvers)
}
return match
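// Illustrative sketch (editorial addition, not part of this diff): a custom
// Filter written against the new signature, resolving the author's excerpt
// through entity.Resolvers exactly like AuthorFilter above. AuthorLoginFilter is
// hypothetical and written as if it lived in the cache package.
package cache

import (
	"strings"

	"github.com/MichaelMure/git-bug/entity"
)

// AuthorLoginFilter returns a Filter matching bugs whose author's login
// contains the given substring.
func AuthorLoginFilter(login string) Filter {
	return func(excerpt *BugExcerpt, resolvers entity.Resolvers) bool {
		author, err := entity.Resolve[*IdentityExcerpt](resolvers, excerpt.AuthorId)
		if err != nil {
			return false
		}
		return strings.Contains(strings.ToLower(author.Login), strings.ToLower(login))
	}
}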
diff --git a/cache/identity_cache.go b/cache/identity_cache.go
index 3b7bb818..466b6150 100644
--- a/cache/identity_cache.go
+++ b/cache/identity_cache.go
@@ -1,31 +1,41 @@
package cache
import (
+ "sync"
+
"github.com/MichaelMure/git-bug/entities/identity"
+ "github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/repository"
)
var _ identity.Interface = &IdentityCache{}
+var _ CacheEntity = &IdentityCache{}
// IdentityCache is a wrapper around an Identity for caching.
type IdentityCache struct {
+ repo repository.ClockedRepo
+ entityUpdated func(id entity.Id) error
+
+ mu sync.Mutex
*identity.Identity
- repoCache *RepoCache
}
-func NewIdentityCache(repoCache *RepoCache, id *identity.Identity) *IdentityCache {
+func NewIdentityCache(i *identity.Identity, repo repository.ClockedRepo, entityUpdated func(id entity.Id) error) *IdentityCache {
return &IdentityCache{
- Identity: id,
- repoCache: repoCache,
+ repo: repo,
+ entityUpdated: entityUpdated,
+ Identity: i,
}
}
func (i *IdentityCache) notifyUpdated() error {
- return i.repoCache.identityUpdated(i.Identity.Id())
+ return i.entityUpdated(i.Identity.Id())
}
func (i *IdentityCache) Mutate(repo repository.RepoClock, f func(*identity.Mutator)) error {
+ i.mu.Lock()
err := i.Identity.Mutate(repo, f)
+ i.mu.Unlock()
if err != nil {
return err
}
@@ -33,7 +43,9 @@ func (i *IdentityCache) Mutate(repo repository.RepoClock, f func(*identity.Mutat
}
func (i *IdentityCache) Commit() error {
- err := i.Identity.Commit(i.repoCache.repo)
+ i.mu.Lock()
+ err := i.Identity.Commit(i.repo)
+ i.mu.Unlock()
if err != nil {
return err
}
@@ -41,9 +53,15 @@ func (i *IdentityCache) Commit() error {
}
func (i *IdentityCache) CommitAsNeeded() error {
- err := i.Identity.CommitAsNeeded(i.repoCache.repo)
+ i.mu.Lock()
+ err := i.Identity.CommitAsNeeded(i.repo)
+ i.mu.Unlock()
if err != nil {
return err
}
return i.notifyUpdated()
}
+
+func (i *IdentityCache) Lock() {
+ i.mu.Lock()
+}
diff --git a/cache/identity_excerpt.go b/cache/identity_excerpt.go
index 0166f493..79d88537 100644
--- a/cache/identity_excerpt.go
+++ b/cache/identity_excerpt.go
@@ -5,7 +5,6 @@ import (
"fmt"
"strings"
- "github.com/MichaelMure/git-bug/entities/identity"
"github.com/MichaelMure/git-bug/entity"
)
@@ -14,26 +13,36 @@ func init() {
gob.Register(IdentityExcerpt{})
}
+var _ Excerpt = &IdentityExcerpt{}
+
// IdentityExcerpt hold a subset of the identity values to be able to sort and
// filter identities efficiently without having to read and compile each raw
// identity.
type IdentityExcerpt struct {
- Id entity.Id
+ id entity.Id
Name string
Login string
ImmutableMetadata map[string]string
}
-func NewIdentityExcerpt(i *identity.Identity) *IdentityExcerpt {
+func NewIdentityExcerpt(i *IdentityCache) *IdentityExcerpt {
return &IdentityExcerpt{
- Id: i.Id(),
+ id: i.Id(),
Name: i.Name(),
Login: i.Login(),
ImmutableMetadata: i.ImmutableMetadata(),
}
}
+func (i *IdentityExcerpt) setId(id entity.Id) {
+ i.id = id
+}
+
+func (i *IdentityExcerpt) Id() entity.Id {
+ return i.id
+}
+
// DisplayName return a non-empty string to display, representing the
// identity, based on the non-empty values.
func (i *IdentityExcerpt) DisplayName() string {
@@ -51,7 +60,7 @@ func (i *IdentityExcerpt) DisplayName() string {
// Match matches a query with the identity name, login and ID prefixes
func (i *IdentityExcerpt) Match(query string) bool {
- return i.Id.HasPrefix(query) ||
+ return i.id.HasPrefix(query) ||
strings.Contains(strings.ToLower(i.Name), query) ||
strings.Contains(strings.ToLower(i.Login), query)
}
@@ -67,7 +76,7 @@ func (b IdentityById) Len() int {
}
func (b IdentityById) Less(i, j int) bool {
- return b[i].Id < b[j].Id
+ return b[i].id < b[j].id
}
func (b IdentityById) Swap(i, j int) {
diff --git a/cache/identity_subcache.go b/cache/identity_subcache.go
new file mode 100644
index 00000000..f862ca8b
--- /dev/null
+++ b/cache/identity_subcache.go
@@ -0,0 +1,124 @@
+package cache
+
+import (
+ "fmt"
+
+ "github.com/MichaelMure/git-bug/entities/identity"
+ "github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/repository"
+)
+
+type RepoCacheIdentity struct {
+ *SubCache[*identity.Identity, *IdentityExcerpt, *IdentityCache]
+}
+
+func NewRepoCacheIdentity(repo repository.ClockedRepo,
+ resolvers func() entity.Resolvers,
+ getUserIdentity getUserIdentityFunc) *RepoCacheIdentity {
+
+ makeCached := func(i *identity.Identity, entityUpdated func(id entity.Id) error) *IdentityCache {
+ return NewIdentityCache(i, repo, entityUpdated)
+ }
+
+ makeIndex := func(i *IdentityCache) []string {
+ // no indexing
+ return nil
+ }
+
+ // TODO: this is terribly ugly, but we are currently stuck with the fact that identities are NOT using the fancy dag framework.
+ //  This leads to various complications here and there to handle entities generically and avoid large code duplication.
+ //  TL;DR: something has to give, and this is the least ugly solution I found. It "normalizes" identities as just another "dag framework"
+ //  entity. Ideally identities would be converted to the dag framework, but right now that could open a potential attack: if an old
+ //  private key is leaked, it would be possible to craft a legal identity update that takes over the most recent version. While this is
+ //  meaningless in the case of a normal entity, it's really an issue for identities.
+
+ actions := Actions[*identity.Identity]{
+ ReadWithResolver: func(repo repository.ClockedRepo, resolvers entity.Resolvers, id entity.Id) (*identity.Identity, error) {
+ return identity.ReadLocal(repo, id)
+ },
+ ReadAllWithResolver: func(repo repository.ClockedRepo, resolvers entity.Resolvers) <-chan entity.StreamedEntity[*identity.Identity] {
+ return identity.ReadAllLocal(repo)
+ },
+ Remove: identity.RemoveIdentity,
+ MergeAll: func(repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, mergeAuthor identity.Interface) <-chan entity.MergeResult {
+ return identity.MergeAll(repo, remote)
+ },
+ }
+
+ sc := NewSubCache[*identity.Identity, *IdentityExcerpt, *IdentityCache](
+ repo, resolvers, getUserIdentity,
+ makeCached, NewIdentityExcerpt, makeIndex, actions,
+ identity.Typename, identity.Namespace,
+ formatVersion, defaultMaxLoadedBugs,
+ )
+
+ return &RepoCacheIdentity{SubCache: sc}
+}
+
+// ResolveIdentityImmutableMetadata retrieves an Identity that has the exact given metadata on
+// one of its versions. If multiple versions have the same key, the first defined takes precedence.
+func (c *RepoCacheIdentity) ResolveIdentityImmutableMetadata(key string, value string) (*IdentityCache, error) {
+ return c.ResolveMatcher(func(excerpt *IdentityExcerpt) bool {
+ return excerpt.ImmutableMetadata[key] == value
+ })
+}
+
+// New creates a new identity.
+// The new identity is written in the repository (commit).
+func (c *RepoCacheIdentity) New(name string, email string) (*IdentityCache, error) {
+ return c.NewRaw(name, email, "", "", nil, nil)
+}
+
+// NewFull creates a new identity.
+// The new identity is written in the repository (commit).
+func (c *RepoCacheIdentity) NewFull(name string, email string, login string, avatarUrl string, keys []*identity.Key) (*IdentityCache, error) {
+ return c.NewRaw(name, email, login, avatarUrl, keys, nil)
+}
+
+func (c *RepoCacheIdentity) NewRaw(name string, email string, login string, avatarUrl string, keys []*identity.Key, metadata map[string]string) (*IdentityCache, error) {
+ i, err := identity.NewIdentityFull(c.repo, name, email, login, avatarUrl, keys)
+ if err != nil {
+ return nil, err
+ }
+ return c.finishIdentity(i, metadata)
+}
+
+func (c *RepoCacheIdentity) NewFromGitUser() (*IdentityCache, error) {
+ return c.NewFromGitUserRaw(nil)
+}
+
+func (c *RepoCacheIdentity) NewFromGitUserRaw(metadata map[string]string) (*IdentityCache, error) {
+ i, err := identity.NewFromGitUser(c.repo)
+ if err != nil {
+ return nil, err
+ }
+ return c.finishIdentity(i, metadata)
+}
+
+func (c *RepoCacheIdentity) finishIdentity(i *identity.Identity, metadata map[string]string) (*IdentityCache, error) {
+ for key, value := range metadata {
+ i.SetMetadata(key, value)
+ }
+
+ err := i.Commit(c.repo)
+ if err != nil {
+ return nil, err
+ }
+
+ c.mu.Lock()
+ if _, has := c.cached[i.Id()]; has {
+ c.mu.Unlock()
+ return nil, fmt.Errorf("identity %s already exists in the cache", i.Id())
+ }
+
+ cached := NewIdentityCache(i, c.repo, c.entityUpdated)
+ c.cached[i.Id()] = cached
+ c.mu.Unlock()
+
+ // force the write of the excerpt
+ err = c.entityUpdated(i.Id())
+ if err != nil {
+ return nil, err
+ }
+
+ return cached, nil
+}
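// Illustrative sketch (editorial addition, not part of this diff): creating an
// identity through the sub-cache and finding it again later by immutable
// metadata. The "github-login" key is hypothetical, the error handling is
// deliberately simplified, and it assumes metadata set at creation time ends up
// in ImmutableMetadata.
package example

import "github.com/MichaelMure/git-bug/cache"

func ensureImportedIdentity(ids *cache.RepoCacheIdentity, name, email, login string) (*cache.IdentityCache, error) {
	// reuse an identity previously created with the same marker, if any
	if i, err := ids.ResolveIdentityImmutableMetadata("github-login", login); err == nil {
		return i, nil
	}
	return ids.NewRaw(name, email, login, "", nil, map[string]string{
		"github-login": login,
	})
}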
diff --git a/cache/lru_id_cache.go b/cache/lru_id_cache.go
index fda12ca6..0e5e31a7 100644
--- a/cache/lru_id_cache.go
+++ b/cache/lru_id_cache.go
@@ -8,49 +8,49 @@ import (
"github.com/MichaelMure/git-bug/entity"
)
-type LRUIdCache struct {
- parentCache *lru.Cache
+type lruIdCache struct {
+ lru *lru.Cache
}
-func NewLRUIdCache() *LRUIdCache {
+func newLRUIdCache() *lruIdCache {
// we can ignore the error here as it would only fail if the size is negative.
cache, _ := lru.New(math.MaxInt32)
- return &LRUIdCache{
+ return &lruIdCache{
cache,
}
}
-func (c *LRUIdCache) Add(id entity.Id) bool {
- return c.parentCache.Add(id, nil)
+func (c *lruIdCache) Add(id entity.Id) bool {
+ return c.lru.Add(id, nil)
}
-func (c *LRUIdCache) Contains(id entity.Id) bool {
- return c.parentCache.Contains(id)
+func (c *lruIdCache) Contains(id entity.Id) bool {
+ return c.lru.Contains(id)
}
-func (c *LRUIdCache) Get(id entity.Id) bool {
- _, present := c.parentCache.Get(id)
+func (c *lruIdCache) Get(id entity.Id) bool {
+ _, present := c.lru.Get(id)
return present
}
-func (c *LRUIdCache) GetOldest() (entity.Id, bool) {
- id, _, present := c.parentCache.GetOldest()
+func (c *lruIdCache) GetOldest() (entity.Id, bool) {
+ id, _, present := c.lru.GetOldest()
return id.(entity.Id), present
}
-func (c *LRUIdCache) GetOldestToNewest() (ids []entity.Id) {
- interfaceKeys := c.parentCache.Keys()
+func (c *lruIdCache) GetOldestToNewest() (ids []entity.Id) {
+ interfaceKeys := c.lru.Keys()
for _, id := range interfaceKeys {
ids = append(ids, id.(entity.Id))
}
return
}
-func (c *LRUIdCache) Len() int {
- return c.parentCache.Len()
+func (c *lruIdCache) Len() int {
+ return c.lru.Len()
}
-func (c *LRUIdCache) Remove(id entity.Id) bool {
- return c.parentCache.Remove(id)
+func (c *lruIdCache) Remove(id entity.Id) bool {
+ return c.lru.Remove(id)
}
diff --git a/cache/multi_repo_cache.go b/cache/multi_repo_cache.go
index 659cd5e6..007737ad 100644
--- a/cache/multi_repo_cache.go
+++ b/cache/multi_repo_cache.go
@@ -21,25 +21,25 @@ func NewMultiRepoCache() *MultiRepoCache {
}
// RegisterRepository register a named repository. Use this for multi-repo setup
-func (c *MultiRepoCache) RegisterRepository(ref string, repo repository.ClockedRepo) (*RepoCache, error) {
- r, err := NewRepoCache(repo)
+func (c *MultiRepoCache) RegisterRepository(name string, repo repository.ClockedRepo) (*RepoCache, chan BuildEvent, error) {
+ r, events, err := NewNamedRepoCache(repo, name)
if err != nil {
- return nil, err
+ return nil, nil, err
}
- c.repos[ref] = r
- return r, nil
+ c.repos[name] = r
+ return r, events, nil
}
-// RegisterDefaultRepository register a unnamed repository. Use this for mono-repo setup
-func (c *MultiRepoCache) RegisterDefaultRepository(repo repository.ClockedRepo) (*RepoCache, error) {
- r, err := NewRepoCache(repo)
+// RegisterDefaultRepository registers an unnamed repository. Use this for a mono-repo setup
+func (c *MultiRepoCache) RegisterDefaultRepository(repo repository.ClockedRepo) (*RepoCache, chan BuildEvent, error) {
+ r, events, err := NewRepoCache(repo)
if err != nil {
- return nil, err
+ return nil, nil, err
}
c.repos[defaultRepoName] = r
- return r, nil
+ return r, events, nil
}
// DefaultRepo retrieve the default repository
@@ -55,9 +55,9 @@ func (c *MultiRepoCache) DefaultRepo() (*RepoCache, error) {
panic("unreachable")
}
-// ResolveRepo retrieve a repository with a reference
-func (c *MultiRepoCache) ResolveRepo(ref string) (*RepoCache, error) {
- r, ok := c.repos[ref]
+// ResolveRepo retrieves a repository by name
+func (c *MultiRepoCache) ResolveRepo(name string) (*RepoCache, error) {
+ r, ok := c.repos[name]
if !ok {
return nil, fmt.Errorf("unknown repo")
}
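// Illustrative sketch (editorial addition, not part of this diff): registering a
// repository and draining the build events before first use, as the new API
// contract requires. The register function is hypothetical.
package example

import (
	"github.com/MichaelMure/git-bug/cache"
	"github.com/MichaelMure/git-bug/repository"
)

func register(mrc *cache.MultiRepoCache, name string, repo repository.ClockedRepo) (*cache.RepoCache, error) {
	rc, events, err := mrc.RegisterRepository(name, repo)
	if err != nil {
		return nil, err
	}
	// events is nil when the on-disk cache was already up to date
	if events != nil {
		for event := range events {
			if event.Err != nil {
				// drain the remaining events before bailing out,
				// mirroring NewRepoCacheNoEvents
				for range events {
				}
				return nil, event.Err
			}
		}
	}
	return rc, nil
}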
diff --git a/cache/repo_cache.go b/cache/repo_cache.go
index 71abf968..7852ec7d 100644
--- a/cache/repo_cache.go
+++ b/cache/repo_cache.go
@@ -8,10 +8,9 @@ import (
"strconv"
"sync"
- "github.com/MichaelMure/git-bug/entities/bug"
- "github.com/MichaelMure/git-bug/entities/identity"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/repository"
+ "github.com/MichaelMure/git-bug/util/multierr"
"github.com/MichaelMure/git-bug/util/process"
)
@@ -28,6 +27,17 @@ var _ repository.RepoCommon = &RepoCache{}
var _ repository.RepoConfig = &RepoCache{}
var _ repository.RepoKeyring = &RepoCache{}
+// cacheMgmt is the expected interface for a sub-cache.
+type cacheMgmt interface {
+ Typename() string
+ Load() error
+ Build() error
+ SetCacheSize(size int)
+ MergeAll(remote string) <-chan entity.MergeResult
+ GetNamespace() string
+ Close() error
+}
+
// RepoCache is a cache for a Repository. This cache has multiple functions:
//
// 1. After being loaded, a Bug is kept in memory in the cache, allowing for fast
@@ -49,88 +59,109 @@ type RepoCache struct {
// the name of the repository, as defined in the MultiRepoCache
name string
- // resolvers for all known entities
+ // resolvers for all known entities and excerpts
resolvers entity.Resolvers
- // maximum number of loaded bugs
- maxLoadedBugs int
+ bugs *RepoCacheBug
+ identities *RepoCacheIdentity
- muBug sync.RWMutex
- // excerpt of bugs data for all bugs
- bugExcerpts map[entity.Id]*BugExcerpt
- // bug loaded in memory
- bugs map[entity.Id]*BugCache
- // loadedBugs is an LRU cache that records which bugs the cache has loaded in
- loadedBugs *LRUIdCache
-
- muIdentity sync.RWMutex
- // excerpt of identities data for all identities
- identitiesExcerpts map[entity.Id]*IdentityExcerpt
- // identities loaded in memory
- identities map[entity.Id]*IdentityCache
+ subcaches []cacheMgmt
// the user identity's id, if known
+ muUserIdentity sync.RWMutex
userIdentityId entity.Id
}
-func NewRepoCache(r repository.ClockedRepo) (*RepoCache, error) {
+// NewRepoCache creates or opens an unnamed (aka default) cache on top of a raw repository.
+// If the returned BuildEvent channel is not nil, the caller is expected to read all events before the cache is considered
+// ready to use.
+func NewRepoCache(r repository.ClockedRepo) (*RepoCache, chan BuildEvent, error) {
return NewNamedRepoCache(r, "")
}
-func NewNamedRepoCache(r repository.ClockedRepo, name string) (*RepoCache, error) {
+// NewNamedRepoCache creates or opens a named cache on top of a raw repository.
+// If the returned BuildEvent channel is not nil, the caller is expected to read all events before the cache is considered
+// ready to use.
+func NewNamedRepoCache(r repository.ClockedRepo, name string) (*RepoCache, chan BuildEvent, error) {
c := &RepoCache{
- repo: r,
- name: name,
- maxLoadedBugs: defaultMaxLoadedBugs,
- bugs: make(map[entity.Id]*BugCache),
- loadedBugs: NewLRUIdCache(),
- identities: make(map[entity.Id]*IdentityCache),
+ repo: r,
+ name: name,
}
- c.resolvers = makeResolvers(c)
+ c.identities = NewRepoCacheIdentity(r, c.getResolvers, c.GetUserIdentity)
+ c.subcaches = append(c.subcaches, c.identities)
+
+ c.bugs = NewRepoCacheBug(r, c.getResolvers, c.GetUserIdentity)
+ c.subcaches = append(c.subcaches, c.bugs)
+
+ c.resolvers = entity.Resolvers{
+ &IdentityCache{}: entity.ResolverFunc[*IdentityCache](c.identities.Resolve),
+ &IdentityExcerpt{}: entity.ResolverFunc[*IdentityExcerpt](c.identities.ResolveExcerpt),
+ &BugCache{}: entity.ResolverFunc[*BugCache](c.bugs.Resolve),
+ &BugExcerpt{}: entity.ResolverFunc[*BugExcerpt](c.bugs.ResolveExcerpt),
+ }
err := c.lock()
if err != nil {
- return &RepoCache{}, err
+ return &RepoCache{}, nil, err
}
err = c.load()
if err == nil {
- return c, nil
+ return c, nil, nil
}
// Cache is either missing, broken or outdated. Rebuilding.
- err = c.buildCache()
+ events := c.buildCache()
+
+ return c, events, nil
+}
+
+func NewRepoCacheNoEvents(r repository.ClockedRepo) (*RepoCache, error) {
+ cache, events, err := NewRepoCache(r)
if err != nil {
return nil, err
}
+ if events != nil {
+ for event := range events {
+ if event.Err != nil {
+ for range events {
+ }
+ return nil, event.Err
+ }
+ }
+ }
+ return cache, nil
+}
+
+// Bugs gives access to the Bug entities
+func (c *RepoCache) Bugs() *RepoCacheBug {
+ return c.bugs
+}
+
+// Identities gives access to the Identity entities
+func (c *RepoCache) Identities() *RepoCacheIdentity {
+ return c.identities
+}
- return c, c.write()
+func (c *RepoCache) getResolvers() entity.Resolvers {
+ return c.resolvers
}
// setCacheSize change the maximum number of loaded bugs
func (c *RepoCache) setCacheSize(size int) {
- c.maxLoadedBugs = size
- c.evictIfNeeded()
+ for _, subcache := range c.subcaches {
+ subcache.SetCacheSize(size)
+ }
}
// load will try to read from the disk all the cache files
func (c *RepoCache) load() error {
- err := c.loadBugCache()
- if err != nil {
- return err
+ var errWait multierr.ErrWaitGroup
+ for _, mgmt := range c.subcaches {
+ errWait.Go(mgmt.Load)
}
-
- return c.loadIdentityCache()
-}
-
-// write will serialize on disk all the cache files
-func (c *RepoCache) write() error {
- err := c.writeBugCache()
- if err != nil {
- return err
- }
- return c.writeIdentityCache()
+ return errWait.Wait()
}
func (c *RepoCache) lock() error {
@@ -154,17 +185,16 @@ func (c *RepoCache) lock() error {
}
func (c *RepoCache) Close() error {
- c.muBug.Lock()
- defer c.muBug.Unlock()
- c.muIdentity.Lock()
- defer c.muIdentity.Unlock()
-
- c.identities = make(map[entity.Id]*IdentityCache)
- c.identitiesExcerpts = nil
- c.bugs = make(map[entity.Id]*BugCache)
- c.bugExcerpts = nil
+ var errWait multierr.ErrWaitGroup
+ for _, mgmt := range c.subcaches {
+ errWait.Go(mgmt.Close)
+ }
+ err := errWait.Wait()
+ if err != nil {
+ return err
+ }
- err := c.repo.Close()
+ err = c.repo.Close()
if err != nil {
return err
}
@@ -172,51 +202,59 @@ func (c *RepoCache) Close() error {
return c.repo.LocalStorage().Remove(lockfile)
}
-func (c *RepoCache) buildCache() error {
- _, _ = fmt.Fprintf(os.Stderr, "Building identity cache... ")
-
- c.identitiesExcerpts = make(map[entity.Id]*IdentityExcerpt)
-
- allIdentities := identity.ReadAllLocal(c.repo)
-
- for i := range allIdentities {
- if i.Err != nil {
- return i.Err
- }
-
- c.identitiesExcerpts[i.Identity.Id()] = NewIdentityExcerpt(i.Identity)
- }
-
- _, _ = fmt.Fprintln(os.Stderr, "Done.")
-
- _, _ = fmt.Fprintf(os.Stderr, "Building bug cache... ")
-
- c.bugExcerpts = make(map[entity.Id]*BugExcerpt)
+type BuildEventType int
- allBugs := bug.ReadAllWithResolver(c.repo, c.resolvers)
-
- // wipe the index just to be sure
- err := c.repo.ClearBleveIndex("bug")
- if err != nil {
- return err
- }
-
- for b := range allBugs {
- if b.Err != nil {
- return b.Err
- }
+const (
+ _ BuildEventType = iota
+ BuildEventStarted
+ BuildEventFinished
+)
- snap := b.Bug.Compile()
- c.bugExcerpts[b.Bug.Id()] = NewBugExcerpt(b.Bug, snap)
+// BuildEvent carries an event happening during the cache build process.
+type BuildEvent struct {
+ // Err carries an error if the build process failed. If set, no other field matters.
+ Err error
+ // Typename is the name of the entity the event relates to.
+ Typename string
+ // Event is the type of the event.
+ Event BuildEventType
+}
- if err := c.addBugToSearchIndex(snap); err != nil {
- return err
+func (c *RepoCache) buildCache() chan BuildEvent {
+ out := make(chan BuildEvent)
+
+ go func() {
+ defer close(out)
+
+ var wg sync.WaitGroup
+ for _, subcache := range c.subcaches {
+ wg.Add(1)
+ go func(subcache cacheMgmt) {
+ defer wg.Done()
+ out <- BuildEvent{
+ Typename: subcache.Typename(),
+ Event: BuildEventStarted,
+ }
+
+ err := subcache.Build()
+ if err != nil {
+ out <- BuildEvent{
+ Typename: subcache.Typename(),
+ Err: err,
+ }
+ return
+ }
+
+ out <- BuildEvent{
+ Typename: subcache.Typename(),
+ Event: BuildEventFinished,
+ }
+ }(subcache)
}
- }
-
- _, _ = fmt.Fprintln(os.Stderr, "Done.")
+ wg.Wait()
+ }()
- return nil
+ return out
}
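// Illustrative sketch (editorial addition, not part of this diff): reporting
// build progress from the BuildEvent stream, e.g. for a CLI front-end. The
// output format is invented for the example.
package example

import (
	"fmt"
	"os"

	"github.com/MichaelMure/git-bug/cache"
)

func reportBuild(events chan cache.BuildEvent) error {
	for event := range events {
		if event.Err != nil {
			// drain the channel so the builder goroutine can finish
			for range events {
			}
			return event.Err
		}
		switch event.Event {
		case cache.BuildEventStarted:
			fmt.Fprintf(os.Stderr, "Building %s cache...\n", event.Typename)
		case cache.BuildEventFinished:
			fmt.Fprintf(os.Stderr, "Done building %s cache.\n", event.Typename)
		}
	}
	return nil
}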
// repoIsAvailable check is the given repository is locked by a Cache.
diff --git a/cache/repo_cache_bug.go b/cache/repo_cache_bug.go
deleted file mode 100644
index 2992421c..00000000
--- a/cache/repo_cache_bug.go
+++ /dev/null
@@ -1,556 +0,0 @@
-package cache
-
-import (
- "bytes"
- "encoding/gob"
- "errors"
- "fmt"
- "sort"
- "strings"
- "time"
- "unicode/utf8"
-
- "github.com/blevesearch/bleve"
-
- "github.com/MichaelMure/git-bug/entities/bug"
- "github.com/MichaelMure/git-bug/entity"
- "github.com/MichaelMure/git-bug/query"
- "github.com/MichaelMure/git-bug/repository"
-)
-
-const bugCacheFile = "bug-cache"
-
-var errBugNotInCache = errors.New("bug missing from cache")
-
-// bugUpdated is a callback to trigger when the excerpt of a bug changed,
-// that is each time a bug is updated
-func (c *RepoCache) bugUpdated(id entity.Id) error {
- c.muBug.Lock()
- b, ok := c.bugs[id]
- if !ok {
- c.muBug.Unlock()
-
- // if the bug is not loaded at this point, it means it was loaded before
- // but got evicted. Which means we potentially have multiple copies in
- // memory and thus concurrent write.
- // Failing immediately here is the simple and safe solution to avoid
- // complicated data loss.
- return errBugNotInCache
- }
- c.loadedBugs.Get(id)
- c.bugExcerpts[id] = NewBugExcerpt(b.bug, b.Snapshot())
- c.muBug.Unlock()
-
- if err := c.addBugToSearchIndex(b.Snapshot()); err != nil {
- return err
- }
-
- // we only need to write the bug cache
- return c.writeBugCache()
-}
-
-// load will try to read from the disk the bug cache file
-func (c *RepoCache) loadBugCache() error {
- c.muBug.Lock()
- defer c.muBug.Unlock()
-
- f, err := c.repo.LocalStorage().Open(bugCacheFile)
- if err != nil {
- return err
- }
-
- decoder := gob.NewDecoder(f)
-
- aux := struct {
- Version uint
- Excerpts map[entity.Id]*BugExcerpt
- }{}
-
- err = decoder.Decode(&aux)
- if err != nil {
- return err
- }
-
- if aux.Version != formatVersion {
- return fmt.Errorf("unknown cache format version %v", aux.Version)
- }
-
- c.bugExcerpts = aux.Excerpts
-
- index, err := c.repo.GetBleveIndex("bug")
- if err != nil {
- return err
- }
-
- // simple heuristic to detect a mismatch between the index and the bugs
- count, err := index.DocCount()
- if err != nil {
- return err
- }
- if count != uint64(len(c.bugExcerpts)) {
- return fmt.Errorf("count mismatch between bleve and bug excerpts")
- }
-
- return nil
-}
-
-// write will serialize on disk the bug cache file
-func (c *RepoCache) writeBugCache() error {
- c.muBug.RLock()
- defer c.muBug.RUnlock()
-
- var data bytes.Buffer
-
- aux := struct {
- Version uint
- Excerpts map[entity.Id]*BugExcerpt
- }{
- Version: formatVersion,
- Excerpts: c.bugExcerpts,
- }
-
- encoder := gob.NewEncoder(&data)
-
- err := encoder.Encode(aux)
- if err != nil {
- return err
- }
-
- f, err := c.repo.LocalStorage().Create(bugCacheFile)
- if err != nil {
- return err
- }
-
- _, err = f.Write(data.Bytes())
- if err != nil {
- return err
- }
-
- return f.Close()
-}
-
-// ResolveBugExcerpt retrieve a BugExcerpt matching the exact given id
-func (c *RepoCache) ResolveBugExcerpt(id entity.Id) (*BugExcerpt, error) {
- c.muBug.RLock()
- defer c.muBug.RUnlock()
-
- excerpt, ok := c.bugExcerpts[id]
- if !ok {
- return nil, bug.ErrBugNotExist
- }
-
- return excerpt, nil
-}
-
-// ResolveBug retrieve a bug matching the exact given id
-func (c *RepoCache) ResolveBug(id entity.Id) (*BugCache, error) {
- c.muBug.RLock()
- cached, ok := c.bugs[id]
- if ok {
- c.loadedBugs.Get(id)
- c.muBug.RUnlock()
- return cached, nil
- }
- c.muBug.RUnlock()
-
- b, err := bug.ReadWithResolver(c.repo, c.resolvers, id)
- if err != nil {
- return nil, err
- }
-
- cached = NewBugCache(c, b)
-
- c.muBug.Lock()
- c.bugs[id] = cached
- c.loadedBugs.Add(id)
- c.muBug.Unlock()
-
- c.evictIfNeeded()
-
- return cached, nil
-}
-
-// evictIfNeeded will evict a bug from the cache if needed
-// it also removes references of the bug from the bugs
-func (c *RepoCache) evictIfNeeded() {
- c.muBug.Lock()
- defer c.muBug.Unlock()
- if c.loadedBugs.Len() <= c.maxLoadedBugs {
- return
- }
-
- for _, id := range c.loadedBugs.GetOldestToNewest() {
- b := c.bugs[id]
- if b.NeedCommit() {
- continue
- }
-
- b.mu.Lock()
- c.loadedBugs.Remove(id)
- delete(c.bugs, id)
-
- if c.loadedBugs.Len() <= c.maxLoadedBugs {
- return
- }
- }
-}
-
-// ResolveBugExcerptPrefix retrieve a BugExcerpt matching an id prefix. It fails if multiple
-// bugs match.
-func (c *RepoCache) ResolveBugExcerptPrefix(prefix string) (*BugExcerpt, error) {
- return c.ResolveBugExcerptMatcher(func(excerpt *BugExcerpt) bool {
- return excerpt.Id.HasPrefix(prefix)
- })
-}
-
-// ResolveBugPrefix retrieve a bug matching an id prefix. It fails if multiple
-// bugs match.
-func (c *RepoCache) ResolveBugPrefix(prefix string) (*BugCache, error) {
- return c.ResolveBugMatcher(func(excerpt *BugExcerpt) bool {
- return excerpt.Id.HasPrefix(prefix)
- })
-}
-
-// ResolveBugCreateMetadata retrieve a bug that has the exact given metadata on
-// its Create operation, that is, the first operation. It fails if multiple bugs
-// match.
-func (c *RepoCache) ResolveBugCreateMetadata(key string, value string) (*BugCache, error) {
- return c.ResolveBugMatcher(func(excerpt *BugExcerpt) bool {
- return excerpt.CreateMetadata[key] == value
- })
-}
-
-func (c *RepoCache) ResolveBugExcerptMatcher(f func(*BugExcerpt) bool) (*BugExcerpt, error) {
- id, err := c.resolveBugMatcher(f)
- if err != nil {
- return nil, err
- }
- return c.ResolveBugExcerpt(id)
-}
-
-func (c *RepoCache) ResolveBugMatcher(f func(*BugExcerpt) bool) (*BugCache, error) {
- id, err := c.resolveBugMatcher(f)
- if err != nil {
- return nil, err
- }
- return c.ResolveBug(id)
-}
-
-func (c *RepoCache) resolveBugMatcher(f func(*BugExcerpt) bool) (entity.Id, error) {
- c.muBug.RLock()
- defer c.muBug.RUnlock()
-
- // preallocate but empty
- matching := make([]entity.Id, 0, 5)
-
- for _, excerpt := range c.bugExcerpts {
- if f(excerpt) {
- matching = append(matching, excerpt.Id)
- }
- }
-
- if len(matching) > 1 {
- return entity.UnsetId, bug.NewErrMultipleMatchBug(matching)
- }
-
- if len(matching) == 0 {
- return entity.UnsetId, bug.ErrBugNotExist
- }
-
- return matching[0], nil
-}
-
-// ResolveComment search for a Bug/Comment combination matching the merged
-// bug/comment Id prefix. Returns the Bug containing the Comment and the Comment's
-// Id.
-func (c *RepoCache) ResolveComment(prefix string) (*BugCache, entity.CombinedId, error) {
- bugPrefix, _ := entity.SeparateIds(prefix)
- bugCandidate := make([]entity.Id, 0, 5)
-
- // build a list of possible matching bugs
- c.muBug.RLock()
- for _, excerpt := range c.bugExcerpts {
- if excerpt.Id.HasPrefix(bugPrefix) {
- bugCandidate = append(bugCandidate, excerpt.Id)
- }
- }
- c.muBug.RUnlock()
-
- matchingBugIds := make([]entity.Id, 0, 5)
- matchingCommentId := entity.UnsetCombinedId
- var matchingBug *BugCache
-
- // search for matching comments
- // searching every bug candidate allow for some collision with the bug prefix only,
- // before being refined with the full comment prefix
- for _, bugId := range bugCandidate {
- b, err := c.ResolveBug(bugId)
- if err != nil {
- return nil, entity.UnsetCombinedId, err
- }
-
- for _, comment := range b.Snapshot().Comments {
- if comment.CombinedId().HasPrefix(prefix) {
- matchingBugIds = append(matchingBugIds, bugId)
- matchingBug = b
- matchingCommentId = comment.CombinedId()
- }
- }
- }
-
- if len(matchingBugIds) > 1 {
- return nil, entity.UnsetCombinedId, entity.NewErrMultipleMatch("bug/comment", matchingBugIds)
- } else if len(matchingBugIds) == 0 {
- return nil, entity.UnsetCombinedId, errors.New("comment doesn't exist")
- }
-
- return matchingBug, matchingCommentId, nil
-}
-
-// QueryBugs return the id of all Bug matching the given Query
-func (c *RepoCache) QueryBugs(q *query.Query) ([]entity.Id, error) {
- c.muBug.RLock()
- defer c.muBug.RUnlock()
-
- if q == nil {
- return c.AllBugsIds(), nil
- }
-
- matcher := compileMatcher(q.Filters)
-
- var filtered []*BugExcerpt
- var foundBySearch map[entity.Id]*BugExcerpt
-
- if q.Search != nil {
- foundBySearch = map[entity.Id]*BugExcerpt{}
-
- terms := make([]string, len(q.Search))
- copy(terms, q.Search)
- for i, search := range q.Search {
- if strings.Contains(search, " ") {
- terms[i] = fmt.Sprintf("\"%s\"", search)
- }
- }
-
- bleveQuery := bleve.NewQueryStringQuery(strings.Join(terms, " "))
- bleveSearch := bleve.NewSearchRequest(bleveQuery)
-
- index, err := c.repo.GetBleveIndex("bug")
- if err != nil {
- return nil, err
- }
-
- searchResults, err := index.Search(bleveSearch)
- if err != nil {
- return nil, err
- }
-
- for _, hit := range searchResults.Hits {
- foundBySearch[entity.Id(hit.ID)] = c.bugExcerpts[entity.Id(hit.ID)]
- }
- } else {
- foundBySearch = c.bugExcerpts
- }
-
- for _, excerpt := range foundBySearch {
- if matcher.Match(excerpt, c) {
- filtered = append(filtered, excerpt)
- }
- }
-
- var sorter sort.Interface
-
- switch q.OrderBy {
- case query.OrderById:
- sorter = BugsById(filtered)
- case query.OrderByCreation:
- sorter = BugsByCreationTime(filtered)
- case query.OrderByEdit:
- sorter = BugsByEditTime(filtered)
- default:
- return nil, errors.New("missing sort type")
- }
-
- switch q.OrderDirection {
- case query.OrderAscending:
- // Nothing to do
- case query.OrderDescending:
- sorter = sort.Reverse(sorter)
- default:
- return nil, errors.New("missing sort direction")
- }
-
- sort.Sort(sorter)
-
- result := make([]entity.Id, len(filtered))
-
- for i, val := range filtered {
- result[i] = val.Id
- }
-
- return result, nil
-}
-
-// AllBugsIds return all known bug ids
-func (c *RepoCache) AllBugsIds() []entity.Id {
- c.muBug.RLock()
- defer c.muBug.RUnlock()
-
- result := make([]entity.Id, len(c.bugExcerpts))
-
- i := 0
- for _, excerpt := range c.bugExcerpts {
- result[i] = excerpt.Id
- i++
- }
-
- return result
-}
-
-// ValidLabels list valid labels
-//
-// Note: in the future, a proper label policy could be implemented where valid
-// labels are defined in a configuration file. Until that, the default behavior
-// is to return the list of labels already used.
-func (c *RepoCache) ValidLabels() []bug.Label {
- c.muBug.RLock()
- defer c.muBug.RUnlock()
-
- set := map[bug.Label]interface{}{}
-
- for _, excerpt := range c.bugExcerpts {
- for _, l := range excerpt.Labels {
- set[l] = nil
- }
- }
-
- result := make([]bug.Label, len(set))
-
- i := 0
- for l := range set {
- result[i] = l
- i++
- }
-
- // Sort
- sort.Slice(result, func(i, j int) bool {
- return string(result[i]) < string(result[j])
- })
-
- return result
-}
-
-// NewBug create a new bug
-// The new bug is written in the repository (commit)
-func (c *RepoCache) NewBug(title string, message string) (*BugCache, *bug.CreateOperation, error) {
- return c.NewBugWithFiles(title, message, nil)
-}
-
-// NewBugWithFiles create a new bug with attached files for the message
-// The new bug is written in the repository (commit)
-func (c *RepoCache) NewBugWithFiles(title string, message string, files []repository.Hash) (*BugCache, *bug.CreateOperation, error) {
- author, err := c.GetUserIdentity()
- if err != nil {
- return nil, nil, err
- }
-
- return c.NewBugRaw(author, time.Now().Unix(), title, message, files, nil)
-}
-
-// NewBugRaw create a new bug with attached files for the message, as
-// well as metadata for the Create operation.
-// The new bug is written in the repository (commit)
-func (c *RepoCache) NewBugRaw(author *IdentityCache, unixTime int64, title string, message string, files []repository.Hash, metadata map[string]string) (*BugCache, *bug.CreateOperation, error) {
- b, op, err := bug.Create(author.Identity, unixTime, title, message, files, metadata)
- if err != nil {
- return nil, nil, err
- }
-
- err = b.Commit(c.repo)
- if err != nil {
- return nil, nil, err
- }
-
- c.muBug.Lock()
- if _, has := c.bugs[b.Id()]; has {
- c.muBug.Unlock()
- return nil, nil, fmt.Errorf("bug %s already exist in the cache", b.Id())
- }
-
- cached := NewBugCache(c, b)
- c.bugs[b.Id()] = cached
- c.loadedBugs.Add(b.Id())
- c.muBug.Unlock()
-
- c.evictIfNeeded()
-
- // force the write of the excerpt
- err = c.bugUpdated(b.Id())
- if err != nil {
- return nil, nil, err
- }
-
- return cached, op, nil
-}
-
-// RemoveBug removes a bug from the cache and repo given a bug id prefix
-func (c *RepoCache) RemoveBug(prefix string) error {
- b, err := c.ResolveBugPrefix(prefix)
- if err != nil {
- return err
- }
-
- c.muBug.Lock()
-
- err = bug.Remove(c.repo, b.Id())
- if err != nil {
- c.muBug.Unlock()
-
- return err
- }
-
- delete(c.bugs, b.Id())
- delete(c.bugExcerpts, b.Id())
- c.loadedBugs.Remove(b.Id())
-
- c.muBug.Unlock()
-
- return c.writeBugCache()
-}
-
-func (c *RepoCache) addBugToSearchIndex(snap *bug.Snapshot) error {
- searchableBug := struct {
- Text []string
- }{}
-
- // See https://github.com/blevesearch/bleve/issues/1576
- var sb strings.Builder
- normalize := func(text string) string {
- sb.Reset()
- for _, field := range strings.Fields(text) {
- if utf8.RuneCountInString(field) < 100 {
- sb.WriteString(field)
- sb.WriteRune(' ')
- }
- }
- return sb.String()
- }
-
- for _, comment := range snap.Comments {
- searchableBug.Text = append(searchableBug.Text, normalize(comment.Message))
- }
-
- searchableBug.Text = append(searchableBug.Text, normalize(snap.Title))
-
- index, err := c.repo.GetBleveIndex("bug")
- if err != nil {
- return err
- }
-
- err = index.Index(snap.Id().String(), searchableBug)
- if err != nil {
- return err
- }
-
- return nil
-}
diff --git a/cache/repo_cache_common.go b/cache/repo_cache_common.go
index 43ac6beb..f768b8e2 100644
--- a/cache/repo_cache_common.go
+++ b/cache/repo_cache_common.go
@@ -1,12 +1,11 @@
package cache
import (
- "fmt"
+ "sync"
"github.com/go-git/go-billy/v5"
"github.com/pkg/errors"
- "github.com/MichaelMure/git-bug/entities/bug"
"github.com/MichaelMure/git-bug/entities/identity"
"github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/repository"
@@ -74,72 +73,40 @@ func (c *RepoCache) StoreData(data []byte) (repository.Hash, error) {
// Fetch retrieve updates from a remote
// This does not change the local bugs or identities state
func (c *RepoCache) Fetch(remote string) (string, error) {
- stdout1, err := identity.Fetch(c.repo, remote)
- if err != nil {
- return stdout1, err
- }
-
- stdout2, err := bug.Fetch(c.repo, remote)
- if err != nil {
- return stdout2, err
+ prefixes := make([]string, len(c.subcaches))
+ for i, subcache := range c.subcaches {
+ prefixes[i] = subcache.GetNamespace()
}
- return stdout1 + stdout2, nil
+ // fetch everything at once, to have a single auth step if required.
+ return c.repo.FetchRefs(remote, prefixes...)
}
// MergeAll will merge all the available remote bug and identities
func (c *RepoCache) MergeAll(remote string) <-chan entity.MergeResult {
out := make(chan entity.MergeResult)
- // Intercept merge results to update the cache properly
+ dependency := [][]cacheMgmt{
+ {c.identities},
+ {c.bugs},
+ }
+
+ // run MergeAll according to the entities' dependencies and merge the results
go func() {
defer close(out)
- author, err := c.GetUserIdentity()
- if err != nil {
- out <- entity.NewMergeError(err, "")
- return
- }
-
- results := identity.MergeAll(c.repo, remote)
- for result := range results {
- out <- result
-
- if result.Err != nil {
- continue
- }
-
- switch result.Status {
- case entity.MergeStatusNew, entity.MergeStatusUpdated:
- i := result.Entity.(*identity.Identity)
- c.muIdentity.Lock()
- c.identitiesExcerpts[result.Id] = NewIdentityExcerpt(i)
- c.muIdentity.Unlock()
+ for _, subcaches := range dependency {
+ var wg sync.WaitGroup
+ for _, subcache := range subcaches {
+ wg.Add(1)
+ go func(subcache cacheMgmt) {
+ for res := range subcache.MergeAll(remote) {
+ out <- res
+ }
+ wg.Done()
+ }(subcache)
}
- }
-
- results = bug.MergeAll(c.repo, c.resolvers, remote, author)
- for result := range results {
- out <- result
-
- if result.Err != nil {
- continue
- }
-
- switch result.Status {
- case entity.MergeStatusNew, entity.MergeStatusUpdated:
- b := result.Entity.(*bug.Bug)
- snap := b.Compile()
- c.muBug.Lock()
- c.bugExcerpts[result.Id] = NewBugExcerpt(b, snap)
- c.muBug.Unlock()
- }
- }
-
- err = c.write()
- if err != nil {
- out <- entity.NewMergeError(err, "")
- return
+ wg.Wait()
}
}()
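// Illustrative sketch (editorial addition, not part of this diff): consuming the
// merged results, as a "pull" front-end might. Only fields and statuses already
// used in this codebase appear here; the printed output is invented.
package example

import (
	"fmt"

	"github.com/MichaelMure/git-bug/cache"
	"github.com/MichaelMure/git-bug/entity"
)

func mergeRemote(c *cache.RepoCache, remote string) error {
	var firstErr error
	for result := range c.MergeAll(remote) {
		if result.Err != nil {
			if firstErr == nil {
				firstErr = result.Err
			}
			continue
		}
		switch result.Status {
		case entity.MergeStatusNew, entity.MergeStatusUpdated:
			fmt.Println(result.Id, result.Status)
		}
	}
	return firstErr
}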
@@ -148,17 +115,13 @@ func (c *RepoCache) MergeAll(remote string) <-chan entity.MergeResult {
// Push update a remote with the local changes
func (c *RepoCache) Push(remote string) (string, error) {
- stdout1, err := identity.Push(c.repo, remote)
- if err != nil {
- return stdout1, err
+ prefixes := make([]string, len(c.subcaches))
+ for i, subcache := range c.subcaches {
+ prefixes[i] = subcache.GetNamespace()
}
- stdout2, err := bug.Push(c.repo, remote)
- if err != nil {
- return stdout2, err
- }
-
- return stdout1 + stdout2, nil
+ // push everything at once, to have a single auth step if required
+ return c.repo.PushRefs(remote, prefixes...)
}
// Pull will do a Fetch + MergeAll
@@ -182,64 +145,64 @@ func (c *RepoCache) Pull(remote string) error {
}
func (c *RepoCache) SetUserIdentity(i *IdentityCache) error {
- err := identity.SetUserIdentity(c.repo, i.Identity)
- if err != nil {
- return err
- }
-
- c.muIdentity.RLock()
- defer c.muIdentity.RUnlock()
+ c.muUserIdentity.RLock()
+ defer c.muUserIdentity.RUnlock()
// Make sure that everything is fine
- if _, ok := c.identities[i.Id()]; !ok {
+ if _, err := c.identities.Resolve(i.Id()); err != nil {
panic("SetUserIdentity while the identity is not from the cache, something is wrong")
}
+ err := identity.SetUserIdentity(c.repo, i.Identity)
+ if err != nil {
+ return err
+ }
+
c.userIdentityId = i.Id()
return nil
}
func (c *RepoCache) GetUserIdentity() (*IdentityCache, error) {
+ c.muUserIdentity.RLock()
if c.userIdentityId != "" {
- i, ok := c.identities[c.userIdentityId]
- if ok {
- return i, nil
- }
+ defer c.muUserIdentity.RUnlock()
+ return c.identities.Resolve(c.userIdentityId)
}
+ c.muUserIdentity.RUnlock()
- c.muIdentity.Lock()
- defer c.muIdentity.Unlock()
+ c.muUserIdentity.Lock()
+ defer c.muUserIdentity.Unlock()
- i, err := identity.GetUserIdentity(c.repo)
+ i, err := identity.GetUserIdentityId(c.repo)
if err != nil {
return nil, err
}
- cached := NewIdentityCache(c, i)
- c.identities[i.Id()] = cached
- c.userIdentityId = i.Id()
+ c.userIdentityId = i
- return cached, nil
+ return c.identities.Resolve(i)
}
func (c *RepoCache) GetUserIdentityExcerpt() (*IdentityExcerpt, error) {
- if c.userIdentityId == "" {
- id, err := identity.GetUserIdentityId(c.repo)
- if err != nil {
- return nil, err
- }
- c.userIdentityId = id
+ c.muUserIdentity.RLock()
+ if c.userIdentityId != "" {
+ defer c.muUserIdentity.RUnlock()
+ return c.identities.ResolveExcerpt(c.userIdentityId)
}
+ c.muUserIdentity.RUnlock()
- c.muIdentity.RLock()
- defer c.muIdentity.RUnlock()
+ c.muUserIdentity.Lock()
+ defer c.muUserIdentity.Unlock()
- excerpt, ok := c.identitiesExcerpts[c.userIdentityId]
- if !ok {
- return nil, fmt.Errorf("cache: missing identity excerpt %v", c.userIdentityId)
+ i, err := identity.GetUserIdentityId(c.repo)
+ if err != nil {
+ return nil, err
}
- return excerpt, nil
+
+ c.userIdentityId = i
+
+ return c.identities.ResolveExcerpt(i)
}
func (c *RepoCache) IsUserIdentitySet() (bool, error) {
diff --git a/cache/repo_cache_identity.go b/cache/repo_cache_identity.go
deleted file mode 100644
index 4f612280..00000000
--- a/cache/repo_cache_identity.go
+++ /dev/null
@@ -1,271 +0,0 @@
-package cache
-
-import (
- "bytes"
- "encoding/gob"
- "fmt"
-
- "github.com/MichaelMure/git-bug/entities/identity"
- "github.com/MichaelMure/git-bug/entity"
-)
-
-const identityCacheFile = "identity-cache"
-
-// identityUpdated is a callback to trigger when the excerpt of an identity
-// changed, that is each time an identity is updated
-func (c *RepoCache) identityUpdated(id entity.Id) error {
- c.muIdentity.Lock()
-
- i, ok := c.identities[id]
- if !ok {
- c.muIdentity.Unlock()
- panic("missing identity in the cache")
- }
-
- c.identitiesExcerpts[id] = NewIdentityExcerpt(i.Identity)
- c.muIdentity.Unlock()
-
- // we only need to write the identity cache
- return c.writeIdentityCache()
-}
-
-// load will try to read from the disk the identity cache file
-func (c *RepoCache) loadIdentityCache() error {
- c.muIdentity.Lock()
- defer c.muIdentity.Unlock()
-
- f, err := c.repo.LocalStorage().Open(identityCacheFile)
- if err != nil {
- return err
- }
-
- decoder := gob.NewDecoder(f)
-
- aux := struct {
- Version uint
- Excerpts map[entity.Id]*IdentityExcerpt
- }{}
-
- err = decoder.Decode(&aux)
- if err != nil {
- return err
- }
-
- if aux.Version != formatVersion {
- return fmt.Errorf("unknown cache format version %v", aux.Version)
- }
-
- c.identitiesExcerpts = aux.Excerpts
- return nil
-}
-
-// write will serialize on disk the identity cache file
-func (c *RepoCache) writeIdentityCache() error {
- c.muIdentity.RLock()
- defer c.muIdentity.RUnlock()
-
- var data bytes.Buffer
-
- aux := struct {
- Version uint
- Excerpts map[entity.Id]*IdentityExcerpt
- }{
- Version: formatVersion,
- Excerpts: c.identitiesExcerpts,
- }
-
- encoder := gob.NewEncoder(&data)
-
- err := encoder.Encode(aux)
- if err != nil {
- return err
- }
-
- f, err := c.repo.LocalStorage().Create(identityCacheFile)
- if err != nil {
- return err
- }
-
- _, err = f.Write(data.Bytes())
- if err != nil {
- return err
- }
-
- return f.Close()
-}
-
-// ResolveIdentityExcerpt retrieve a IdentityExcerpt matching the exact given id
-func (c *RepoCache) ResolveIdentityExcerpt(id entity.Id) (*IdentityExcerpt, error) {
- c.muIdentity.RLock()
- defer c.muIdentity.RUnlock()
-
- e, ok := c.identitiesExcerpts[id]
- if !ok {
- return nil, identity.ErrIdentityNotExist
- }
-
- return e, nil
-}
-
-// ResolveIdentity retrieve an identity matching the exact given id
-func (c *RepoCache) ResolveIdentity(id entity.Id) (*IdentityCache, error) {
- c.muIdentity.RLock()
- cached, ok := c.identities[id]
- c.muIdentity.RUnlock()
- if ok {
- return cached, nil
- }
-
- i, err := identity.ReadLocal(c.repo, id)
- if err != nil {
- return nil, err
- }
-
- cached = NewIdentityCache(c, i)
-
- c.muIdentity.Lock()
- c.identities[id] = cached
- c.muIdentity.Unlock()
-
- return cached, nil
-}
-
-// ResolveIdentityExcerptPrefix retrieve a IdentityExcerpt matching an id prefix.
-// It fails if multiple identities match.
-func (c *RepoCache) ResolveIdentityExcerptPrefix(prefix string) (*IdentityExcerpt, error) {
- return c.ResolveIdentityExcerptMatcher(func(excerpt *IdentityExcerpt) bool {
- return excerpt.Id.HasPrefix(prefix)
- })
-}
-
-// ResolveIdentityPrefix retrieve an Identity matching an id prefix.
-// It fails if multiple identities match.
-func (c *RepoCache) ResolveIdentityPrefix(prefix string) (*IdentityCache, error) {
- return c.ResolveIdentityMatcher(func(excerpt *IdentityExcerpt) bool {
- return excerpt.Id.HasPrefix(prefix)
- })
-}
-
-// ResolveIdentityImmutableMetadata retrieve an Identity that has the exact given metadata on
-// one of its version. If multiple version have the same key, the first defined take precedence.
-func (c *RepoCache) ResolveIdentityImmutableMetadata(key string, value string) (*IdentityCache, error) {
- return c.ResolveIdentityMatcher(func(excerpt *IdentityExcerpt) bool {
- return excerpt.ImmutableMetadata[key] == value
- })
-}
-
-func (c *RepoCache) ResolveIdentityExcerptMatcher(f func(*IdentityExcerpt) bool) (*IdentityExcerpt, error) {
- id, err := c.resolveIdentityMatcher(f)
- if err != nil {
- return nil, err
- }
- return c.ResolveIdentityExcerpt(id)
-}
-
-func (c *RepoCache) ResolveIdentityMatcher(f func(*IdentityExcerpt) bool) (*IdentityCache, error) {
- id, err := c.resolveIdentityMatcher(f)
- if err != nil {
- return nil, err
- }
- return c.ResolveIdentity(id)
-}
-
-func (c *RepoCache) resolveIdentityMatcher(f func(*IdentityExcerpt) bool) (entity.Id, error) {
- c.muIdentity.RLock()
- defer c.muIdentity.RUnlock()
-
- // preallocate but empty
- matching := make([]entity.Id, 0, 5)
-
- for _, excerpt := range c.identitiesExcerpts {
- if f(excerpt) {
- matching = append(matching, excerpt.Id)
- }
- }
-
- if len(matching) > 1 {
- return entity.UnsetId, identity.NewErrMultipleMatch(matching)
- }
-
- if len(matching) == 0 {
- return entity.UnsetId, identity.ErrIdentityNotExist
- }
-
- return matching[0], nil
-}
-
-// AllIdentityIds return all known identity ids
-func (c *RepoCache) AllIdentityIds() []entity.Id {
- c.muIdentity.RLock()
- defer c.muIdentity.RUnlock()
-
- result := make([]entity.Id, len(c.identitiesExcerpts))
-
- i := 0
- for _, excerpt := range c.identitiesExcerpts {
- result[i] = excerpt.Id
- i++
- }
-
- return result
-}
-
-func (c *RepoCache) NewIdentityFromGitUser() (*IdentityCache, error) {
- return c.NewIdentityFromGitUserRaw(nil)
-}
-
-func (c *RepoCache) NewIdentityFromGitUserRaw(metadata map[string]string) (*IdentityCache, error) {
- i, err := identity.NewFromGitUser(c.repo)
- if err != nil {
- return nil, err
- }
- return c.finishIdentity(i, metadata)
-}
-
-// NewIdentity create a new identity
-// The new identity is written in the repository (commit)
-func (c *RepoCache) NewIdentity(name string, email string) (*IdentityCache, error) {
- return c.NewIdentityRaw(name, email, "", "", nil, nil)
-}
-
-// NewIdentityFull create a new identity
-// The new identity is written in the repository (commit)
-func (c *RepoCache) NewIdentityFull(name string, email string, login string, avatarUrl string, keys []*identity.Key) (*IdentityCache, error) {
- return c.NewIdentityRaw(name, email, login, avatarUrl, keys, nil)
-}
-
-func (c *RepoCache) NewIdentityRaw(name string, email string, login string, avatarUrl string, keys []*identity.Key, metadata map[string]string) (*IdentityCache, error) {
- i, err := identity.NewIdentityFull(c.repo, name, email, login, avatarUrl, keys)
- if err != nil {
- return nil, err
- }
- return c.finishIdentity(i, metadata)
-}
-
-func (c *RepoCache) finishIdentity(i *identity.Identity, metadata map[string]string) (*IdentityCache, error) {
- for key, value := range metadata {
- i.SetMetadata(key, value)
- }
-
- err := i.Commit(c.repo)
- if err != nil {
- return nil, err
- }
-
- c.muIdentity.Lock()
- if _, has := c.identities[i.Id()]; has {
- return nil, fmt.Errorf("identity %s already exist in the cache", i.Id())
- }
-
- cached := NewIdentityCache(c, i)
- c.identities[i.Id()] = cached
- c.muIdentity.Unlock()
-
- // force the write of the excerpt
- err = c.identityUpdated(i.Id())
- if err != nil {
- return nil, err
- }
-
- return cached, nil
-}
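The identity-specific resolution helpers removed above are subsumed by the generic SubCache introduced later in this diff. A minimal sketch of the equivalent metadata lookup through the new accessor, assuming the ImmutableMetadata field and the ResolveMatcher method carry over to the new IdentityExcerpt and identity subcache:

    // Sketch only: metadata-based identity lookup after the refactor.
    func resolveByMetadata(c *RepoCache, key, value string) (*IdentityCache, error) {
        return c.Identities().ResolveMatcher(func(excerpt *IdentityExcerpt) bool {
            return excerpt.ImmutableMetadata[key] == value
        })
    }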
diff --git a/cache/repo_cache_test.go b/cache/repo_cache_test.go
index a9557ff0..796b5db9 100644
--- a/cache/repo_cache_test.go
+++ b/cache/repo_cache_test.go
@@ -9,6 +9,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/MichaelMure/git-bug/entities/bug"
+ "github.com/MichaelMure/git-bug/entities/identity"
+ "github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/query"
"github.com/MichaelMure/git-bug/repository"
)
@@ -16,11 +18,11 @@ import (
func TestCache(t *testing.T) {
repo := repository.CreateGoGitTestRepo(t, false)
- cache, err := NewRepoCache(repo)
+ cache, err := NewRepoCacheNoEvents(repo)
require.NoError(t, err)
// Create, set and get user identity
- iden1, err := cache.NewIdentity("René Descartes", "rene@descartes.fr")
+ iden1, err := cache.Identities().New("René Descartes", "rene@descartes.fr")
require.NoError(t, err)
err = cache.SetUserIdentity(iden1)
require.NoError(t, err)
@@ -29,102 +31,122 @@ func TestCache(t *testing.T) {
require.Equal(t, iden1.Id(), userIden.Id())
// it's possible to create two identical identities
- iden2, err := cache.NewIdentity("René Descartes", "rene@descartes.fr")
+ iden2, err := cache.Identities().New("René Descartes", "rene@descartes.fr")
require.NoError(t, err)
// Two identical identities yield a different id
require.NotEqual(t, iden1.Id(), iden2.Id())
+ indexCount := func(name string) uint64 {
+ idx, err := repo.GetIndex(name)
+ require.NoError(t, err)
+ count, err := idx.DocCount()
+ require.NoError(t, err)
+ return count
+ }
+
// There is now two identities in the cache
- require.Len(t, cache.AllIdentityIds(), 2)
- require.Len(t, cache.identitiesExcerpts, 2)
- require.Len(t, cache.identities, 2)
+ require.Len(t, cache.Identities().AllIds(), 2)
+ require.Len(t, cache.identities.excerpts, 2)
+ require.Len(t, cache.identities.cached, 2)
+ require.Equal(t, uint64(2), indexCount(identity.Namespace))
+ require.Equal(t, uint64(0), indexCount(bug.Namespace))
// Create a bug
- bug1, _, err := cache.NewBug("title", "message")
+ bug1, _, err := cache.Bugs().New("title", "message")
require.NoError(t, err)
// It's possible to create two identical bugs
- bug2, _, err := cache.NewBug("title", "message")
+ bug2, _, err := cache.Bugs().New("title", "marker")
require.NoError(t, err)
// two identical bugs yield a different id
require.NotEqual(t, bug1.Id(), bug2.Id())
// There is now two bugs in the cache
- require.Len(t, cache.AllBugsIds(), 2)
- require.Len(t, cache.bugExcerpts, 2)
- require.Len(t, cache.bugs, 2)
+ require.Len(t, cache.Bugs().AllIds(), 2)
+ require.Len(t, cache.bugs.excerpts, 2)
+ require.Len(t, cache.bugs.cached, 2)
+ require.Equal(t, uint64(2), indexCount(identity.Namespace))
+ require.Equal(t, uint64(2), indexCount(bug.Namespace))
// Resolving
- _, err = cache.ResolveIdentity(iden1.Id())
+ _, err = cache.Identities().Resolve(iden1.Id())
require.NoError(t, err)
- _, err = cache.ResolveIdentityExcerpt(iden1.Id())
+ _, err = cache.Identities().ResolveExcerpt(iden1.Id())
require.NoError(t, err)
- _, err = cache.ResolveIdentityPrefix(iden1.Id().String()[:10])
+ _, err = cache.Identities().ResolvePrefix(iden1.Id().String()[:10])
require.NoError(t, err)
- _, err = cache.ResolveBug(bug1.Id())
+ _, err = cache.Bugs().Resolve(bug1.Id())
require.NoError(t, err)
- _, err = cache.ResolveBugExcerpt(bug1.Id())
+ _, err = cache.Bugs().ResolveExcerpt(bug1.Id())
require.NoError(t, err)
- _, err = cache.ResolveBugPrefix(bug1.Id().String()[:10])
+ _, err = cache.Bugs().ResolvePrefix(bug1.Id().String()[:10])
require.NoError(t, err)
// Querying
q, err := query.Parse("status:open author:descartes sort:edit-asc")
require.NoError(t, err)
- res, err := cache.QueryBugs(q)
+ res, err := cache.Bugs().Query(q)
require.NoError(t, err)
require.Len(t, res, 2)
+ q, err = query.Parse("status:open marker") // full-text search
+ require.NoError(t, err)
+ res, err = cache.Bugs().Query(q)
+ require.NoError(t, err)
+ require.Len(t, res, 1)
+
// Close
require.NoError(t, cache.Close())
- require.Empty(t, cache.bugs)
- require.Empty(t, cache.bugExcerpts)
- require.Empty(t, cache.identities)
- require.Empty(t, cache.identitiesExcerpts)
+ require.Empty(t, cache.bugs.cached)
+ require.Empty(t, cache.bugs.excerpts)
+ require.Empty(t, cache.identities.cached)
+ require.Empty(t, cache.identities.excerpts)
// Reload, only excerpt are loaded, but as we need to load the identities used in the bugs
// to check the signatures, we also load the identity used above
- cache, err = NewRepoCache(repo)
+ cache, err = NewRepoCacheNoEvents(repo)
require.NoError(t, err)
- require.Empty(t, cache.bugs)
- require.Len(t, cache.identities, 1)
- require.Len(t, cache.bugExcerpts, 2)
- require.Len(t, cache.identitiesExcerpts, 2)
+ require.Len(t, cache.bugs.cached, 0)
+ require.Len(t, cache.bugs.excerpts, 2)
+ require.Len(t, cache.identities.cached, 0)
+ require.Len(t, cache.identities.excerpts, 2)
+ require.Equal(t, uint64(2), indexCount(identity.Namespace))
+ require.Equal(t, uint64(2), indexCount(bug.Namespace))
// Resolving load from the disk
- _, err = cache.ResolveIdentity(iden1.Id())
+ _, err = cache.Identities().Resolve(iden1.Id())
require.NoError(t, err)
- _, err = cache.ResolveIdentityExcerpt(iden1.Id())
+ _, err = cache.Identities().ResolveExcerpt(iden1.Id())
require.NoError(t, err)
- _, err = cache.ResolveIdentityPrefix(iden1.Id().String()[:10])
+ _, err = cache.Identities().ResolvePrefix(iden1.Id().String()[:10])
require.NoError(t, err)
- _, err = cache.ResolveBug(bug1.Id())
+ _, err = cache.Bugs().Resolve(bug1.Id())
require.NoError(t, err)
- _, err = cache.ResolveBugExcerpt(bug1.Id())
+ _, err = cache.Bugs().ResolveExcerpt(bug1.Id())
require.NoError(t, err)
- _, err = cache.ResolveBugPrefix(bug1.Id().String()[:10])
+ _, err = cache.Bugs().ResolvePrefix(bug1.Id().String()[:10])
require.NoError(t, err)
}
func TestCachePushPull(t *testing.T) {
repoA, repoB, _ := repository.SetupGoGitReposAndRemote(t)
- cacheA, err := NewRepoCache(repoA)
+ cacheA, err := NewRepoCacheNoEvents(repoA)
require.NoError(t, err)
- cacheB, err := NewRepoCache(repoB)
+ cacheB, err := NewRepoCacheNoEvents(repoB)
require.NoError(t, err)
// Create, set and get user identity
- reneA, err := cacheA.NewIdentity("René Descartes", "rene@descartes.fr")
+ reneA, err := cacheA.Identities().New("René Descartes", "rene@descartes.fr")
require.NoError(t, err)
err = cacheA.SetUserIdentity(reneA)
require.NoError(t, err)
- isaacB, err := cacheB.NewIdentity("Isaac Newton", "isaac@newton.uk")
+ isaacB, err := cacheB.Identities().New("Isaac Newton", "isaac@newton.uk")
require.NoError(t, err)
err = cacheB.SetUserIdentity(isaacB)
require.NoError(t, err)
@@ -136,7 +158,7 @@ func TestCachePushPull(t *testing.T) {
require.NoError(t, err)
// Create a bug in A
- _, _, err = cacheA.NewBug("bug1", "message")
+ _, _, err = cacheA.Bugs().New("bug1", "message")
require.NoError(t, err)
// A --> remote --> B
@@ -146,17 +168,17 @@ func TestCachePushPull(t *testing.T) {
err = cacheB.Pull("origin")
require.NoError(t, err)
- require.Len(t, cacheB.AllBugsIds(), 1)
+ require.Len(t, cacheB.Bugs().AllIds(), 1)
// retrieve and set identity
- reneB, err := cacheB.ResolveIdentity(reneA.Id())
+ reneB, err := cacheB.Identities().Resolve(reneA.Id())
require.NoError(t, err)
err = cacheB.SetUserIdentity(reneB)
require.NoError(t, err)
// B --> remote --> A
- _, _, err = cacheB.NewBug("bug2", "message")
+ _, _, err = cacheB.Bugs().New("bug2", "message")
require.NoError(t, err)
_, err = cacheB.Push("origin")
@@ -165,7 +187,7 @@ func TestCachePushPull(t *testing.T) {
err = cacheA.Pull("origin")
require.NoError(t, err)
- require.Len(t, cacheA.AllBugsIds(), 2)
+ require.Len(t, cacheA.Bugs().AllIds(), 2)
}
func TestRemove(t *testing.T) {
@@ -179,20 +201,20 @@ func TestRemove(t *testing.T) {
err = repo.AddRemote("remoteB", remoteB.GetLocalRemote())
require.NoError(t, err)
- repoCache, err := NewRepoCache(repo)
+ repoCache, err := NewRepoCacheNoEvents(repo)
require.NoError(t, err)
- rene, err := repoCache.NewIdentity("René Descartes", "rene@descartes.fr")
+ rene, err := repoCache.Identities().New("René Descartes", "rene@descartes.fr")
require.NoError(t, err)
err = repoCache.SetUserIdentity(rene)
require.NoError(t, err)
- _, _, err = repoCache.NewBug("title", "message")
+ _, _, err = repoCache.Bugs().New("title", "message")
require.NoError(t, err)
// and one more for testing
- b1, _, err := repoCache.NewBug("title", "message")
+ b1, _, err := repoCache.Bugs().New("title", "message")
require.NoError(t, err)
_, err = repoCache.Push("remoteA")
@@ -207,72 +229,72 @@ func TestRemove(t *testing.T) {
_, err = repoCache.Fetch("remoteB")
require.NoError(t, err)
- err = repoCache.RemoveBug(b1.Id().String())
+ err = repoCache.Bugs().Remove(b1.Id().String())
require.NoError(t, err)
- assert.Equal(t, 1, len(repoCache.bugs))
- assert.Equal(t, 1, len(repoCache.bugExcerpts))
+ assert.Len(t, repoCache.bugs.cached, 1)
+ assert.Len(t, repoCache.bugs.excerpts, 1)
- _, err = repoCache.ResolveBug(b1.Id())
- assert.Error(t, bug.ErrBugNotExist, err)
+ _, err = repoCache.Bugs().Resolve(b1.Id())
+ assert.ErrorAs(t, err, &entity.ErrNotFound{})

}
func TestCacheEviction(t *testing.T) {
repo := repository.CreateGoGitTestRepo(t, false)
- repoCache, err := NewRepoCache(repo)
+ repoCache, err := NewRepoCacheNoEvents(repo)
require.NoError(t, err)
repoCache.setCacheSize(2)
- require.Equal(t, 2, repoCache.maxLoadedBugs)
- require.Equal(t, 0, repoCache.loadedBugs.Len())
- require.Equal(t, 0, len(repoCache.bugs))
+ require.Equal(t, 2, repoCache.bugs.maxLoaded)
+ require.Len(t, repoCache.bugs.cached, 0)
+ require.Equal(t, repoCache.bugs.lru.Len(), 0)
// Generating some bugs
- rene, err := repoCache.NewIdentity("René Descartes", "rene@descartes.fr")
+ rene, err := repoCache.Identities().New("René Descartes", "rene@descartes.fr")
require.NoError(t, err)
err = repoCache.SetUserIdentity(rene)
require.NoError(t, err)
- bug1, _, err := repoCache.NewBug("title", "message")
+ bug1, _, err := repoCache.Bugs().New("title", "message")
require.NoError(t, err)
checkBugPresence(t, repoCache, bug1, true)
- require.Equal(t, 1, repoCache.loadedBugs.Len())
- require.Equal(t, 1, len(repoCache.bugs))
+ require.Len(t, repoCache.bugs.cached, 1)
+ require.Equal(t, 1, repoCache.bugs.lru.Len())
- bug2, _, err := repoCache.NewBug("title", "message")
+ bug2, _, err := repoCache.Bugs().New("title", "message")
require.NoError(t, err)
checkBugPresence(t, repoCache, bug1, true)
checkBugPresence(t, repoCache, bug2, true)
- require.Equal(t, 2, repoCache.loadedBugs.Len())
- require.Equal(t, 2, len(repoCache.bugs))
+ require.Len(t, repoCache.bugs.cached, 2)
+ require.Equal(t, 2, repoCache.bugs.lru.Len())
// Number of bugs should not exceed max size of lruCache, oldest one should be evicted
- bug3, _, err := repoCache.NewBug("title", "message")
+ bug3, _, err := repoCache.Bugs().New("title", "message")
require.NoError(t, err)
- require.Equal(t, 2, repoCache.loadedBugs.Len())
- require.Equal(t, 2, len(repoCache.bugs))
+ require.Len(t, repoCache.bugs.cached, 2)
+ require.Equal(t, 2, repoCache.bugs.lru.Len())
checkBugPresence(t, repoCache, bug1, false)
checkBugPresence(t, repoCache, bug2, true)
checkBugPresence(t, repoCache, bug3, true)
// Accessing bug should update position in lruCache and therefore it should not be evicted
- repoCache.loadedBugs.Get(bug2.Id())
- oldestId, _ := repoCache.loadedBugs.GetOldest()
+ repoCache.bugs.lru.Get(bug2.Id())
+ oldestId, _ := repoCache.bugs.lru.GetOldest()
require.Equal(t, bug3.Id(), oldestId)
checkBugPresence(t, repoCache, bug1, false)
checkBugPresence(t, repoCache, bug2, true)
checkBugPresence(t, repoCache, bug3, true)
- require.Equal(t, 2, repoCache.loadedBugs.Len())
- require.Equal(t, 2, len(repoCache.bugs))
+ require.Len(t, repoCache.bugs.cached, 2)
+ require.Equal(t, 2, repoCache.bugs.lru.Len())
}
func checkBugPresence(t *testing.T, cache *RepoCache, bug *BugCache, presence bool) {
id := bug.Id()
- require.Equal(t, presence, cache.loadedBugs.Contains(id))
- b, ok := cache.bugs[id]
+ require.Equal(t, presence, cache.bugs.lru.Contains(id))
+ b, ok := cache.bugs.cached[id]
require.Equal(t, presence, ok)
if ok {
require.Equal(t, bug, b)
@@ -286,12 +308,12 @@ func TestLongDescription(t *testing.T) {
repo := repository.CreateGoGitTestRepo(t, false)
- backend, err := NewRepoCache(repo)
+ backend, err := NewRepoCacheNoEvents(repo)
require.NoError(t, err)
- i, err := backend.NewIdentity("René Descartes", "rene@descartes.fr")
+ i, err := backend.Identities().New("René Descartes", "rene@descartes.fr")
require.NoError(t, err)
- _, _, err = backend.NewBugRaw(i, time.Now().Unix(), text, text, nil, nil)
+ _, _, err = backend.Bugs().NewRaw(i, time.Now().Unix(), text, text, nil, nil)
require.NoError(t, err)
}
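Taken together, the updated test shows the shape of the API migration: entity-specific methods on RepoCache move behind the typed Bugs() and Identities() accessors. A minimal sketch of a call site written against the new API, as it might appear inside package cache:

    // Sketch only: open a cache without build events and run a query.
    func openAndQuery(repo repository.ClockedRepo) ([]entity.Id, error) {
        c, err := NewRepoCacheNoEvents(repo)
        if err != nil {
            return nil, err
        }
        defer c.Close()

        q, err := query.Parse("status:open sort:edit-asc")
        if err != nil {
            return nil, err
        }
        // returns the matching bug ids, to be resolved via c.Bugs().ResolveExcerpt(id)
        return c.Bugs().Query(q)
    }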
diff --git a/cache/resolvers.go b/cache/resolvers.go
deleted file mode 100644
index 9ed2fa4c..00000000
--- a/cache/resolvers.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package cache
-
-import (
- "github.com/MichaelMure/git-bug/entity"
-)
-
-func makeResolvers(cache *RepoCache) entity.Resolvers {
- return entity.Resolvers{
- &IdentityCache{}: newIdentityCacheResolver(cache),
- &BugCache{}: newBugCacheResolver(cache),
- }
-}
-
-var _ entity.Resolver = &identityCacheResolver{}
-
-// identityCacheResolver is an identity Resolver that retrieve identities from
-// the cache
-type identityCacheResolver struct {
- cache *RepoCache
-}
-
-func newIdentityCacheResolver(cache *RepoCache) *identityCacheResolver {
- return &identityCacheResolver{cache: cache}
-}
-
-func (i *identityCacheResolver) Resolve(id entity.Id) (entity.Interface, error) {
- return i.cache.ResolveIdentity(id)
-}
-
-var _ entity.Resolver = &bugCacheResolver{}
-
-type bugCacheResolver struct {
- cache *RepoCache
-}
-
-func newBugCacheResolver(cache *RepoCache) *bugCacheResolver {
- return &bugCacheResolver{cache: cache}
-}
-
-func (b *bugCacheResolver) Resolve(id entity.Id) (entity.Interface, error) {
- return b.cache.ResolveBug(id)
-}
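With resolvers.go removed, the entity.Resolvers wiring presumably moves next to the new subcaches. A hedged sketch of what a cache-backed bug resolver could look like under the updated Resolver interface (which now returns entity.Resolved, see entities/bug/resolver.go below); the actual wiring lives in repo_cache.go and may differ:

    // Sketch only, assuming *BugCache satisfies entity.Resolved.
    type bugCacheResolver struct {
        cache *RepoCache
    }

    func (r *bugCacheResolver) Resolve(id entity.Id) (entity.Resolved, error) {
        return r.cache.Bugs().Resolve(id)
    }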
diff --git a/cache/subcache.go b/cache/subcache.go
new file mode 100644
index 00000000..7757ce82
--- /dev/null
+++ b/cache/subcache.go
@@ -0,0 +1,505 @@
+package cache
+
+import (
+ "bytes"
+ "encoding/gob"
+ "fmt"
+ "path/filepath"
+ "sync"
+
+ "github.com/pkg/errors"
+
+ "github.com/MichaelMure/git-bug/entities/identity"
+ "github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/repository"
+)
+
+type Excerpt interface {
+ Id() entity.Id
+ setId(id entity.Id)
+}
+
+type CacheEntity interface {
+ Id() entity.Id
+ NeedCommit() bool
+ Lock()
+}
+
+type getUserIdentityFunc func() (*IdentityCache, error)
+
+// Actions exposes a number of action functions on Entities, to give upper layers (cache) a way to normalize interactions.
+// Note: ideally this wouldn't exist; the cache layer would assume that everything is an entity/dag and directly use the
+// functions from this package, but right now identities are not using that framework.
+type Actions[EntityT entity.Interface] struct {
+ ReadWithResolver func(repo repository.ClockedRepo, resolvers entity.Resolvers, id entity.Id) (EntityT, error)
+ ReadAllWithResolver func(repo repository.ClockedRepo, resolvers entity.Resolvers) <-chan entity.StreamedEntity[EntityT]
+ Remove func(repo repository.ClockedRepo, id entity.Id) error
+ MergeAll func(repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, mergeAuthor identity.Interface) <-chan entity.MergeResult
+}
+
+var _ cacheMgmt = &SubCache[entity.Interface, Excerpt, CacheEntity]{}
+
+type SubCache[EntityT entity.Interface, ExcerptT Excerpt, CacheT CacheEntity] struct {
+ repo repository.ClockedRepo
+ resolvers func() entity.Resolvers
+
+ getUserIdentity getUserIdentityFunc
+ makeCached func(entity EntityT, entityUpdated func(id entity.Id) error) CacheT
+ makeExcerpt func(CacheT) ExcerptT
+ makeIndexData func(CacheT) []string
+ actions Actions[EntityT]
+
+ typename string
+ namespace string
+ version uint
+ maxLoaded int
+
+ mu sync.RWMutex
+ excerpts map[entity.Id]ExcerptT
+ cached map[entity.Id]CacheT
+ lru *lruIdCache
+}
+
+func NewSubCache[EntityT entity.Interface, ExcerptT Excerpt, CacheT CacheEntity](
+ repo repository.ClockedRepo,
+ resolvers func() entity.Resolvers, getUserIdentity getUserIdentityFunc,
+ makeCached func(entity EntityT, entityUpdated func(id entity.Id) error) CacheT,
+ makeExcerpt func(CacheT) ExcerptT,
+ makeIndexData func(CacheT) []string,
+ actions Actions[EntityT],
+ typename, namespace string,
+ version uint, maxLoaded int) *SubCache[EntityT, ExcerptT, CacheT] {
+ return &SubCache[EntityT, ExcerptT, CacheT]{
+ repo: repo,
+ resolvers: resolvers,
+ getUserIdentity: getUserIdentity,
+ makeCached: makeCached,
+ makeExcerpt: makeExcerpt,
+ makeIndexData: makeIndexData,
+ actions: actions,
+ typename: typename,
+ namespace: namespace,
+ version: version,
+ maxLoaded: maxLoaded,
+ excerpts: make(map[entity.Id]ExcerptT),
+ cached: make(map[entity.Id]CacheT),
+ lru: newLRUIdCache(),
+ }
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) Typename() string {
+ return sc.typename
+}
+
+// Load will try to read the entity cache file from disk
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) Load() error {
+ sc.mu.Lock()
+ defer sc.mu.Unlock()
+
+ f, err := sc.repo.LocalStorage().Open(filepath.Join("cache", sc.namespace))
+ if err != nil {
+ return err
+ }
+
+ decoder := gob.NewDecoder(f)
+
+ aux := struct {
+ Version uint
+ Excerpts map[entity.Id]ExcerptT
+ }{}
+
+ err = decoder.Decode(&aux)
+ if err != nil {
+ return err
+ }
+
+ if aux.Version != sc.version {
+ return fmt.Errorf("unknown %s cache format version %v", sc.namespace, aux.Version)
+ }
+
+ // the id is not serialized in the excerpt itself (non-exported field in go, long story ...),
+ // so we fix it here, which doubles as enforcing coherency.
+ for id, excerpt := range aux.Excerpts {
+ excerpt.setId(id)
+ }
+
+ sc.excerpts = aux.Excerpts
+
+ index, err := sc.repo.GetIndex(sc.namespace)
+ if err != nil {
+ return err
+ }
+
+ // simple heuristic to detect a mismatch between the index and the entities
+ count, err := index.DocCount()
+ if err != nil {
+ return err
+ }
+ if count != uint64(len(sc.excerpts)) {
+ return fmt.Errorf("count mismatch between bleve and %s excerpts", sc.namespace)
+ }
+
+ return nil
+}
+
+// write will serialize the entity cache file on disk
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) write() error {
+ sc.mu.RLock()
+ defer sc.mu.RUnlock()
+
+ var data bytes.Buffer
+
+ aux := struct {
+ Version uint
+ Excerpts map[entity.Id]ExcerptT
+ }{
+ Version: sc.version,
+ Excerpts: sc.excerpts,
+ }
+
+ encoder := gob.NewEncoder(&data)
+
+ err := encoder.Encode(aux)
+ if err != nil {
+ return err
+ }
+
+ f, err := sc.repo.LocalStorage().Create(filepath.Join("cache", sc.namespace))
+ if err != nil {
+ return err
+ }
+
+ _, err = f.Write(data.Bytes())
+ if err != nil {
+ return err
+ }
+
+ return f.Close()
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) Build() error {
+ sc.excerpts = make(map[entity.Id]ExcerptT)
+
+ allEntities := sc.actions.ReadAllWithResolver(sc.repo, sc.resolvers())
+
+ index, err := sc.repo.GetIndex(sc.namespace)
+ if err != nil {
+ return err
+ }
+
+ // wipe the index just to be sure
+ err = index.Clear()
+ if err != nil {
+ return err
+ }
+
+ indexer, indexEnd := index.IndexBatch()
+
+ for e := range allEntities {
+ if e.Err != nil {
+ return e.Err
+ }
+
+ cached := sc.makeCached(e.Entity, sc.entityUpdated)
+ sc.excerpts[e.Entity.Id()] = sc.makeExcerpt(cached)
+ // might as well keep them in memory
+ sc.cached[e.Entity.Id()] = cached
+
+ indexData := sc.makeIndexData(cached)
+ if err := indexer(e.Entity.Id().String(), indexData); err != nil {
+ return err
+ }
+ }
+
+ err = indexEnd()
+ if err != nil {
+ return err
+ }
+
+ err = sc.write()
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) SetCacheSize(size int) {
+ sc.maxLoaded = size
+ sc.evictIfNeeded()
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) Close() error {
+ sc.mu.Lock()
+ defer sc.mu.Unlock()
+ sc.excerpts = nil
+ sc.cached = make(map[entity.Id]CacheT)
+ return nil
+}
+
+// AllIds returns all known entity ids
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) AllIds() []entity.Id {
+ sc.mu.RLock()
+ defer sc.mu.RUnlock()
+
+ result := make([]entity.Id, len(sc.excerpts))
+
+ i := 0
+ for _, excerpt := range sc.excerpts {
+ result[i] = excerpt.Id()
+ i++
+ }
+
+ return result
+}
+
+// Resolve retrieves an entity matching the exact given id
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) Resolve(id entity.Id) (CacheT, error) {
+ sc.mu.RLock()
+ cached, ok := sc.cached[id]
+ if ok {
+ sc.lru.Get(id)
+ sc.mu.RUnlock()
+ return cached, nil
+ }
+ sc.mu.RUnlock()
+
+ e, err := sc.actions.ReadWithResolver(sc.repo, sc.resolvers(), id)
+ if err != nil {
+ return *new(CacheT), err
+ }
+
+ cached = sc.makeCached(e, sc.entityUpdated)
+
+ sc.mu.Lock()
+ sc.cached[id] = cached
+ sc.lru.Add(id)
+ sc.mu.Unlock()
+
+ sc.evictIfNeeded()
+
+ return cached, nil
+}
+
+// ResolvePrefix retrieves an entity matching an id prefix. It fails if multiple
+// entities match.
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) ResolvePrefix(prefix string) (CacheT, error) {
+ return sc.ResolveMatcher(func(excerpt ExcerptT) bool {
+ return excerpt.Id().HasPrefix(prefix)
+ })
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) ResolveMatcher(f func(ExcerptT) bool) (CacheT, error) {
+ id, err := sc.resolveMatcher(f)
+ if err != nil {
+ return *new(CacheT), err
+ }
+ return sc.Resolve(id)
+}
+
+// ResolveExcerpt retrieves an Excerpt matching the exact given id
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) ResolveExcerpt(id entity.Id) (ExcerptT, error) {
+ sc.mu.RLock()
+ defer sc.mu.RUnlock()
+
+ excerpt, ok := sc.excerpts[id]
+ if !ok {
+ return *new(ExcerptT), entity.NewErrNotFound(sc.typename)
+ }
+
+ return excerpt, nil
+}
+
+// ResolveExcerptPrefix retrieves an Excerpt matching an id prefix. It fails if multiple
+// entities match.
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) ResolveExcerptPrefix(prefix string) (ExcerptT, error) {
+ return sc.ResolveExcerptMatcher(func(excerpt ExcerptT) bool {
+ return excerpt.Id().HasPrefix(prefix)
+ })
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) ResolveExcerptMatcher(f func(ExcerptT) bool) (ExcerptT, error) {
+ id, err := sc.resolveMatcher(f)
+ if err != nil {
+ return *new(ExcerptT), err
+ }
+ return sc.ResolveExcerpt(id)
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) resolveMatcher(f func(ExcerptT) bool) (entity.Id, error) {
+ sc.mu.RLock()
+ defer sc.mu.RUnlock()
+
+ // preallocate but empty
+ matching := make([]entity.Id, 0, 5)
+
+ for _, excerpt := range sc.excerpts {
+ if f(excerpt) {
+ matching = append(matching, excerpt.Id())
+ }
+ }
+
+ if len(matching) > 1 {
+ return entity.UnsetId, entity.NewErrMultipleMatch(sc.typename, matching)
+ }
+
+ if len(matching) == 0 {
+ return entity.UnsetId, entity.NewErrNotFound(sc.typename)
+ }
+
+ return matching[0], nil
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) add(e EntityT) (CacheT, error) {
+ sc.mu.Lock()
+ if _, has := sc.cached[e.Id()]; has {
+ sc.mu.Unlock()
+ return *new(CacheT), fmt.Errorf("entity %s already exists in the cache", e.Id())
+ }
+
+ cached := sc.makeCached(e, sc.entityUpdated)
+ sc.cached[e.Id()] = cached
+ sc.lru.Add(e.Id())
+ sc.mu.Unlock()
+
+ sc.evictIfNeeded()
+
+ // force the write of the excerpt
+ err := sc.entityUpdated(e.Id())
+ if err != nil {
+ return *new(CacheT), err
+ }
+
+ return cached, nil
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) Remove(prefix string) error {
+ e, err := sc.ResolvePrefix(prefix)
+ if err != nil {
+ return err
+ }
+
+ sc.mu.Lock()
+
+ err = sc.actions.Remove(sc.repo, e.Id())
+ if err != nil {
+ sc.mu.Unlock()
+ return err
+ }
+
+ delete(sc.cached, e.Id())
+ delete(sc.excerpts, e.Id())
+ sc.lru.Remove(e.Id())
+
+ sc.mu.Unlock()
+
+ return sc.write()
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) MergeAll(remote string) <-chan entity.MergeResult {
+ out := make(chan entity.MergeResult)
+
+ // Intercept merge results to update the cache properly
+ go func() {
+ defer close(out)
+
+ author, err := sc.getUserIdentity()
+ if err != nil {
+ out <- entity.NewMergeError(err, "")
+ return
+ }
+
+ results := sc.actions.MergeAll(sc.repo, sc.resolvers(), remote, author)
+ for result := range results {
+ out <- result
+
+ if result.Err != nil {
+ continue
+ }
+
+ switch result.Status {
+ case entity.MergeStatusNew, entity.MergeStatusUpdated:
+ e := result.Entity.(EntityT)
+ cached := sc.makeCached(e, sc.entityUpdated)
+
+ sc.mu.Lock()
+ sc.excerpts[result.Id] = sc.makeExcerpt(cached)
+ // might as well keep them in memory
+ sc.cached[result.Id] = cached
+ sc.mu.Unlock()
+ }
+ }
+
+ err = sc.write()
+ if err != nil {
+ out <- entity.NewMergeError(err, "")
+ return
+ }
+ }()
+
+ return out
+
+}
+
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) GetNamespace() string {
+ return sc.namespace
+}
+
+// entityUpdated is a callback to call when an entity has been updated, to keep its excerpt, the search index and the on-disk cache in sync
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) entityUpdated(id entity.Id) error {
+ sc.mu.Lock()
+ e, ok := sc.cached[id]
+ if !ok {
+ sc.mu.Unlock()
+
+ // if the entity is not loaded at this point, it means it was loaded before
+ // but got evicted. That means we potentially have multiple copies in
+ // memory, and thus concurrent writes.
+ // Failing immediately here is the simple and safe solution to avoid
+ // complicated data loss.
+ return errors.New("entity missing from cache")
+ }
+ sc.lru.Get(id)
+ sc.excerpts[id] = sc.makeExcerpt(e)
+ sc.mu.Unlock()
+
+ index, err := sc.repo.GetIndex(sc.namespace)
+ if err != nil {
+ return err
+ }
+
+ err = index.IndexOne(e.Id().String(), sc.makeIndexData(e))
+ if err != nil {
+ return err
+ }
+
+ return sc.write()
+}
+
+// evictIfNeeded evicts entities from the in-memory cache until maxLoaded is respected, skipping entities with uncommitted changes
+func (sc *SubCache[EntityT, ExcerptT, CacheT]) evictIfNeeded() {
+ sc.mu.Lock()
+ defer sc.mu.Unlock()
+ if sc.lru.Len() <= sc.maxLoaded {
+ return
+ }
+
+ for _, id := range sc.lru.GetOldestToNewest() {
+ b := sc.cached[id]
+ if b.NeedCommit() {
+ continue
+ }
+
+ // as a form of assurance that evicted entities don't get manipulated, we lock them here.
+ // if something tries to do so anyway, it will deadlock the program and make the problem obvious.
+ b.Lock()
+
+ sc.lru.Remove(id)
+ delete(sc.cached, id)
+
+ if sc.lru.Len() <= sc.maxLoaded {
+ return
+ }
+ }
+}
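The Actions struct above is the seam that lets one generic SubCache drive any entity package. A hedged sketch of how a bug subcache might be wired to the bug package functions that appear later in this diff; the cached/excerpt constructors, the index data and the two trailing constants are assumptions, and the real wiring lives elsewhere in the cache package:

    // Sketch only: wiring a SubCache for bugs (assumed helper signatures).
    func newBugSubCache(repo repository.ClockedRepo, resolvers func() entity.Resolvers,
        getUserIdentity getUserIdentityFunc) *SubCache[*bug.Bug, *BugExcerpt, *BugCache] {
        makeCached := func(b *bug.Bug, entityUpdated func(id entity.Id) error) *BugCache {
            return NewBugCache(b, repo, getUserIdentity, entityUpdated) // assumed constructor
        }
        makeExcerpt := func(b *BugCache) *BugExcerpt {
            return NewBugExcerpt(b) // assumed constructor
        }
        makeIndexData := func(b *BugCache) []string {
            return []string{b.Snapshot().Title} // simplified: the real code indexes more fields
        }
        return NewSubCache[*bug.Bug, *BugExcerpt, *BugCache](
            repo, resolvers, getUserIdentity,
            makeCached, makeExcerpt, makeIndexData,
            Actions[*bug.Bug]{
                ReadWithResolver:    bug.ReadWithResolver,
                ReadAllWithResolver: bug.ReadAllWithResolver,
                Remove:              bug.Remove,
                MergeAll:            bug.MergeAll,
            },
            bug.Typename, bug.Namespace,
            1,    // cache format version (assumed)
            1000, // max loaded entities (assumed)
        )
    }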
diff --git a/cache/with_snapshot.go b/cache/with_snapshot.go
new file mode 100644
index 00000000..674b6923
--- /dev/null
+++ b/cache/with_snapshot.go
@@ -0,0 +1,56 @@
+package cache
+
+import (
+ "sync"
+
+ "github.com/MichaelMure/git-bug/entity/dag"
+ "github.com/MichaelMure/git-bug/repository"
+)
+
+var _ dag.Interface[dag.Snapshot, dag.OperationWithApply[dag.Snapshot]] = &withSnapshot[dag.Snapshot, dag.OperationWithApply[dag.Snapshot]]{}
+
+// withSnapshot encapsulates an entity and maintains a snapshot of it efficiently.
+type withSnapshot[SnapT dag.Snapshot, OpT dag.OperationWithApply[SnapT]] struct {
+ dag.Interface[SnapT, OpT]
+ mu sync.Mutex
+ snap *SnapT
+}
+
+func (ws *withSnapshot[SnapT, OpT]) Compile() SnapT {
+ ws.mu.Lock()
+ defer ws.mu.Unlock()
+ if ws.snap == nil {
+ snap := ws.Interface.Compile()
+ ws.snap = &snap
+ }
+ return *ws.snap
+}
+
+// Append intercepts the underlying Append() to update the snapshot efficiently
+func (ws *withSnapshot[SnapT, OpT]) Append(op OpT) {
+ ws.mu.Lock()
+ defer ws.mu.Unlock()
+
+ ws.Interface.Append(op)
+
+ if ws.snap == nil {
+ return
+ }
+
+ op.Apply(*ws.snap)
+ (*ws.snap).AppendOperation(op)
+}
+
+// Commit intercepts the underlying Commit() to keep the maintained snapshot consistent
+func (ws *withSnapshot[SnapT, OpT]) Commit(repo repository.ClockedRepo) error {
+ ws.mu.Lock()
+ defer ws.mu.Unlock()
+
+ err := ws.Interface.Commit(repo)
+ if err != nil {
+ ws.snap = nil
+ return err
+ }
+
+ return nil
+}
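withSnapshot pays for one full Compile(), memoizes the result, and then patches it incrementally as operations are appended. A minimal usage sketch, assuming *bug.Bug satisfies dag.Interface[*bug.Snapshot, bug.Operation]:

    // Sketch only: repeated Compile() calls stay cheap behind withSnapshot.
    func compileCheaply(b *bug.Bug, op bug.Operation) *bug.Snapshot {
        ws := &withSnapshot[*bug.Snapshot, bug.Operation]{Interface: b}

        _ = ws.Compile()    // full compilation, result is memoized
        ws.Append(op)       // op.Apply() patches the memoized snapshot in place
        return ws.Compile() // returns the maintained snapshot, no recompilation
    }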
diff --git a/commands/bridge/bridge_auth_addtoken.go b/commands/bridge/bridge_auth_addtoken.go
index bcab7fc3..2992fa63 100644
--- a/commands/bridge/bridge_auth_addtoken.go
+++ b/commands/bridge/bridge_auth_addtoken.go
@@ -94,7 +94,7 @@ func runBridgeAuthAddToken(env *execenv.Env, opts bridgeAuthAddTokenOptions, arg
if opts.user == "" {
user, err = env.Backend.GetUserIdentity()
} else {
- user, err = env.Backend.ResolveIdentityPrefix(opts.user)
+ user, err = env.Backend.Identities().ResolvePrefix(opts.user)
}
if err != nil {
return err
diff --git a/commands/bug/bug.go b/commands/bug/bug.go
index 04bf8980..bab040d8 100644
--- a/commands/bug/bug.go
+++ b/commands/bug/bug.go
@@ -142,14 +142,14 @@ func runBug(env *execenv.Env, opts bugOptions, args []string) error {
return err
}
- allIds, err := env.Backend.QueryBugs(q)
+ allIds, err := env.Backend.Bugs().Query(q)
if err != nil {
return err
}
bugExcerpt := make([]*cache.BugExcerpt, len(allIds))
for i, id := range allIds {
- b, err := env.Backend.ResolveBugExcerpt(id)
+ b, err := env.Backend.Bugs().ResolveExcerpt(id)
if err != nil {
return err
}
@@ -208,8 +208,8 @@ func bugsJsonFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error
jsonBugs := make([]JSONBugExcerpt, len(bugExcerpts))
for i, b := range bugExcerpts {
jsonBug := JSONBugExcerpt{
- Id: b.Id.String(),
- HumanId: b.Id.Human(),
+ Id: b.Id().String(),
+ HumanId: b.Id().Human(),
CreateTime: cmdjson.NewTime(b.CreateTime(), b.CreateLamportTime),
EditTime: cmdjson.NewTime(b.EditTime(), b.EditLamportTime),
Status: b.Status.String(),
@@ -219,7 +219,7 @@ func bugsJsonFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error
Metadata: b.CreateMetadata,
}
- author, err := env.Backend.ResolveIdentityExcerpt(b.AuthorId)
+ author, err := env.Backend.Identities().ResolveExcerpt(b.AuthorId)
if err != nil {
return err
}
@@ -227,7 +227,7 @@ func bugsJsonFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error
jsonBug.Actors = make([]cmdjson.Identity, len(b.Actors))
for i, element := range b.Actors {
- actor, err := env.Backend.ResolveIdentityExcerpt(element)
+ actor, err := env.Backend.Identities().ResolveExcerpt(element)
if err != nil {
return err
}
@@ -236,7 +236,7 @@ func bugsJsonFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error
jsonBug.Participants = make([]cmdjson.Identity, len(b.Participants))
for i, element := range b.Participants {
- participant, err := env.Backend.ResolveIdentityExcerpt(element)
+ participant, err := env.Backend.Identities().ResolveExcerpt(element)
if err != nil {
return err
}
@@ -252,7 +252,7 @@ func bugsJsonFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error
func bugsCompactFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error {
for _, b := range bugExcerpts {
- author, err := env.Backend.ResolveIdentityExcerpt(b.AuthorId)
+ author, err := env.Backend.Identities().ResolveExcerpt(b.AuthorId)
if err != nil {
return err
}
@@ -266,7 +266,7 @@ func bugsCompactFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) err
}
env.Out.Printf("%s %s %s %s %s\n",
- colors.Cyan(b.Id.Human()),
+ colors.Cyan(b.Id().Human()),
colors.Yellow(b.Status),
text.LeftPadMaxLine(strings.TrimSpace(b.Title), 40, 0),
text.LeftPadMaxLine(labelsTxt.String(), 5, 0),
@@ -278,7 +278,7 @@ func bugsCompactFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) err
func bugsIDFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error {
for _, b := range bugExcerpts {
- env.Out.Println(b.Id.String())
+ env.Out.Println(b.Id().String())
}
return nil
@@ -286,7 +286,7 @@ func bugsIDFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error {
func bugsDefaultFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error {
for _, b := range bugExcerpts {
- author, err := env.Backend.ResolveIdentityExcerpt(b.AuthorId)
+ author, err := env.Backend.Identities().ResolveExcerpt(b.AuthorId)
if err != nil {
return err
}
@@ -313,7 +313,7 @@ func bugsDefaultFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) err
}
env.Out.Printf("%s\t%s\t%s\t%s\t%s\n",
- colors.Cyan(b.Id.Human()),
+ colors.Cyan(b.Id().Human()),
colors.Yellow(b.Status),
titleFmt+labelsFmt,
colors.Magenta(authorFmt),
@@ -325,7 +325,7 @@ func bugsDefaultFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) err
func bugsPlainFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) error {
for _, b := range bugExcerpts {
- env.Out.Printf("%s [%s] %s\n", b.Id.Human(), b.Status, strings.TrimSpace(b.Title))
+ env.Out.Printf("%s [%s] %s\n", b.Id().Human(), b.Status, strings.TrimSpace(b.Title))
}
return nil
}
@@ -353,7 +353,7 @@ func bugsOrgmodeFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) err
title = b.Title
}
- author, err := env.Backend.ResolveIdentityExcerpt(b.AuthorId)
+ author, err := env.Backend.Identities().ResolveExcerpt(b.AuthorId)
if err != nil {
return err
}
@@ -370,7 +370,7 @@ func bugsOrgmodeFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) err
env.Out.Printf("* %-6s %s %s %s: %s %s\n",
status,
- b.Id.Human(),
+ b.Id().Human(),
formatTime(b.CreateTime()),
author.DisplayName(),
title,
@@ -381,26 +381,26 @@ func bugsOrgmodeFormatter(env *execenv.Env, bugExcerpts []*cache.BugExcerpt) err
env.Out.Printf("** Actors:\n")
for _, element := range b.Actors {
- actor, err := env.Backend.ResolveIdentityExcerpt(element)
+ actor, err := env.Backend.Identities().ResolveExcerpt(element)
if err != nil {
return err
}
env.Out.Printf(": %s %s\n",
- actor.Id.Human(),
+ actor.Id().Human(),
actor.DisplayName(),
)
}
env.Out.Printf("** Participants:\n")
for _, element := range b.Participants {
- participant, err := env.Backend.ResolveIdentityExcerpt(element)
+ participant, err := env.Backend.Identities().ResolveExcerpt(element)
if err != nil {
return err
}
env.Out.Printf(": %s %s\n",
- participant.Id.Human(),
+ participant.Id().Human(),
participant.DisplayName(),
)
}
diff --git a/commands/bug/bug_comment_edit.go b/commands/bug/bug_comment_edit.go
index 8be7cb80..2a0289f5 100644
--- a/commands/bug/bug_comment_edit.go
+++ b/commands/bug/bug_comment_edit.go
@@ -41,7 +41,7 @@ func newBugCommentEditCommand() *cobra.Command {
}
func runBugCommentEdit(env *execenv.Env, opts bugCommentEditOptions, args []string) error {
- b, commentId, err := env.Backend.ResolveComment(args[0])
+ b, commentId, err := env.Backend.Bugs().ResolveComment(args[0])
if err != nil {
return err
}
diff --git a/commands/bug/bug_new.go b/commands/bug/bug_new.go
index 4f73a09c..fbfb9def 100644
--- a/commands/bug/bug_new.go
+++ b/commands/bug/bug_new.go
@@ -63,7 +63,7 @@ func runBugNew(env *execenv.Env, opts bugNewOptions) error {
}
}
- b, _, err := env.Backend.NewBug(
+ b, _, err := env.Backend.Bugs().New(
text.CleanupOneLine(opts.title),
text.Cleanup(opts.message),
)
diff --git a/commands/bug/bug_rm.go b/commands/bug/bug_rm.go
index 1d2a7524..04881d54 100644
--- a/commands/bug/bug_rm.go
+++ b/commands/bug/bug_rm.go
@@ -34,7 +34,7 @@ func runBugRm(env *execenv.Env, args []string) (err error) {
return errors.New("you must provide a bug prefix to remove")
}
- err = env.Backend.RemoveBug(args[0])
+ err = env.Backend.Bugs().Remove(args[0])
if err != nil {
return
diff --git a/commands/bug/bug_select.go b/commands/bug/bug_select.go
index 0b1cb15c..2a4d1201 100644
--- a/commands/bug/bug_select.go
+++ b/commands/bug/bug_select.go
@@ -46,7 +46,7 @@ func runBugSelect(env *execenv.Env, args []string) error {
prefix := args[0]
- b, err := env.Backend.ResolveBugPrefix(prefix)
+ b, err := env.Backend.Bugs().ResolvePrefix(prefix)
if err != nil {
return err
}
diff --git a/commands/bug/select/select.go b/commands/bug/select/select.go
index 908ad58c..7096dde4 100644
--- a/commands/bug/select/select.go
+++ b/commands/bug/select/select.go
@@ -9,7 +9,6 @@ import (
"github.com/pkg/errors"
"github.com/MichaelMure/git-bug/cache"
- "github.com/MichaelMure/git-bug/entities/bug"
"github.com/MichaelMure/git-bug/entity"
)
@@ -18,7 +17,7 @@ const selectFile = "select"
var ErrNoValidId = errors.New("you must provide a bug id or use the \"select\" command first")
// ResolveBug first try to resolve a bug using the first argument of the command
-// line. If it fails, it fallback to the select mechanism.
+// line. If it fails, it falls back to the select mechanism.
//
// Returns:
// - the bug if any
@@ -28,13 +27,13 @@ var ErrNoValidId = errors.New("you must provide a bug id or use the \"select\" c
func ResolveBug(repo *cache.RepoCache, args []string) (*cache.BugCache, []string, error) {
// At first, try to use the first argument as a bug prefix
if len(args) > 0 {
- b, err := repo.ResolveBugPrefix(args[0])
+ b, err := repo.Bugs().ResolvePrefix(args[0])
if err == nil {
return b, args[1:], nil
}
- if err != bug.ErrBugNotExist {
+ if !entity.IsErrNotFound(err) {
return nil, nil, err
}
}
@@ -44,7 +43,7 @@ func ResolveBug(repo *cache.RepoCache, args []string) (*cache.BugCache, []string
b, err := selected(repo)
// selected bug is invalid
- if err == bug.ErrBugNotExist {
+ if entity.IsErrNotFound(err) {
// we clear the selected bug
err = Clear(repo)
if err != nil {
@@ -115,7 +114,7 @@ func selected(repo *cache.RepoCache) (*cache.BugCache, error) {
return nil, fmt.Errorf("select file in invalid, removing it")
}
- b, err := repo.ResolveBug(id)
+ b, err := repo.Bugs().Resolve(id)
if err != nil {
return nil, err
}
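Lookup failures now surface as typed errors from the entity package instead of the per-package sentinels deleted in this diff (bug.ErrBugNotExist, identity.ErrIdentityNotExist). The calling pattern is sketched below:

    // Minimal sketch of the new "not found" handling.
    func resolveOrExplain(repo *cache.RepoCache, prefix string) (*cache.BugCache, error) {
        b, err := repo.Bugs().ResolvePrefix(prefix)
        if entity.IsErrNotFound(err) {
            // the prefix simply matches nothing; not an internal failure
            return nil, fmt.Errorf("no bug matching %q", prefix)
        }
        if err != nil {
            return nil, err
        }
        return b, nil
    }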
diff --git a/commands/bug/select/select_test.go b/commands/bug/select/select_test.go
index 702700f4..83ca6643 100644
--- a/commands/bug/select/select_test.go
+++ b/commands/bug/select/select_test.go
@@ -13,7 +13,7 @@ import (
func TestSelect(t *testing.T) {
repo := repository.CreateGoGitTestRepo(t, false)
- repoCache, err := cache.NewRepoCache(repo)
+ repoCache, err := cache.NewRepoCacheNoEvents(repo)
require.NoError(t, err)
_, _, err = ResolveBug(repoCache, []string{})
@@ -28,18 +28,18 @@ func TestSelect(t *testing.T) {
// generate a bunch of bugs
- rene, err := repoCache.NewIdentity("René Descartes", "rene@descartes.fr")
+ rene, err := repoCache.Identities().New("René Descartes", "rene@descartes.fr")
require.NoError(t, err)
for i := 0; i < 10; i++ {
- _, _, err := repoCache.NewBugRaw(rene, time.Now().Unix(), "title", "message", nil, nil)
+ _, _, err := repoCache.Bugs().NewRaw(rene, time.Now().Unix(), "title", "message", nil, nil)
require.NoError(t, err)
}
// and two more for testing
- b1, _, err := repoCache.NewBugRaw(rene, time.Now().Unix(), "title", "message", nil, nil)
+ b1, _, err := repoCache.Bugs().NewRaw(rene, time.Now().Unix(), "title", "message", nil, nil)
require.NoError(t, err)
- b2, _, err := repoCache.NewBugRaw(rene, time.Now().Unix(), "title", "message", nil, nil)
+ b2, _, err := repoCache.Bugs().NewRaw(rene, time.Now().Unix(), "title", "message", nil, nil)
require.NoError(t, err)
err = Select(repoCache, b1.Id())
diff --git a/commands/bug/testenv/testenv.go b/commands/bug/testenv/testenv.go
index 10f20950..acd1f389 100644
--- a/commands/bug/testenv/testenv.go
+++ b/commands/bug/testenv/testenv.go
@@ -19,7 +19,7 @@ func NewTestEnvAndUser(t *testing.T) (*execenv.Env, entity.Id) {
testEnv := execenv.NewTestEnv(t)
- i, err := testEnv.Backend.NewIdentity(testUserName, testUserEmail)
+ i, err := testEnv.Backend.Identities().New(testUserName, testUserEmail)
require.NoError(t, err)
err = testEnv.Backend.SetUserIdentity(i)
@@ -38,7 +38,7 @@ func NewTestEnvAndBug(t *testing.T) (*execenv.Env, entity.Id) {
testEnv, _ := NewTestEnvAndUser(t)
- b, _, err := testEnv.Backend.NewBug(testBugTitle, testBugMessage)
+ b, _, err := testEnv.Backend.Bugs().New(testBugTitle, testBugMessage)
require.NoError(t, err)
return testEnv, b.Id()
@@ -53,7 +53,7 @@ func NewTestEnvAndBugWithComment(t *testing.T) (*execenv.Env, entity.Id, entity.
env, bugID := NewTestEnvAndBug(t)
- b, err := env.Backend.ResolveBug(bugID)
+ b, err := env.Backend.Bugs().Resolve(bugID)
require.NoError(t, err)
commentId, _, err := b.AddComment(testCommentMessage)
diff --git a/commands/cmdjson/json_common.go b/commands/cmdjson/json_common.go
index 60e6e751..34077915 100644
--- a/commands/cmdjson/json_common.go
+++ b/commands/cmdjson/json_common.go
@@ -26,8 +26,8 @@ func NewIdentity(i identity.Interface) Identity {
func NewIdentityFromExcerpt(excerpt *cache.IdentityExcerpt) Identity {
return Identity{
- Id: excerpt.Id.String(),
- HumanId: excerpt.Id.Human(),
+ Id: excerpt.Id().String(),
+ HumanId: excerpt.Id().Human(),
Name: excerpt.Name,
Login: excerpt.Login,
}
diff --git a/commands/completion/helper_completion.go b/commands/completion/helper_completion.go
index 27fbd615..691f0895 100644
--- a/commands/completion/helper_completion.go
+++ b/commands/completion/helper_completion.go
@@ -88,11 +88,11 @@ func Bug(env *execenv.Env) ValidArgsFunction {
}
func bugWithBackend(backend *cache.RepoCache, toComplete string) (completions []string, directives cobra.ShellCompDirective) {
- allIds := backend.AllBugsIds()
+ allIds := backend.Bugs().AllIds()
bugExcerpt := make([]*cache.BugExcerpt, len(allIds))
for i, id := range allIds {
var err error
- bugExcerpt[i], err = backend.ResolveBugExcerpt(id)
+ bugExcerpt[i], err = backend.Bugs().ResolveExcerpt(id)
if err != nil {
return handleError(err)
}
@@ -138,7 +138,7 @@ func BugAndLabels(env *execenv.Env, addOrRemove bool) ValidArgsFunction {
seenLabels[label] = true
}
- allLabels := env.Backend.ValidLabels()
+ allLabels := env.Backend.Bugs().ValidLabels()
labels = make([]bug.Label, 0, len(allLabels))
for _, label := range allLabels {
if !seenLabels[label] {
@@ -200,7 +200,7 @@ func Label(env *execenv.Env) ValidArgsFunction {
_ = env.Backend.Close()
}()
- labels := env.Backend.ValidLabels()
+ labels := env.Backend.Bugs().ValidLabels()
completions = make([]string, len(labels))
for i, label := range labels {
if strings.Contains(label.String(), " ") {
@@ -243,10 +243,10 @@ func Ls(env *execenv.Env) ValidArgsFunction {
if !strings.HasPrefix(toComplete, key) {
continue
}
- ids := env.Backend.AllIdentityIds()
+ ids := env.Backend.Identities().AllIds()
completions = make([]string, len(ids))
for i, id := range ids {
- user, err := env.Backend.ResolveIdentityExcerpt(id)
+ user, err := env.Backend.Identities().ResolveExcerpt(id)
if err != nil {
return handleError(err)
}
@@ -266,7 +266,7 @@ func Ls(env *execenv.Env) ValidArgsFunction {
if !strings.HasPrefix(toComplete, key) {
continue
}
- labels := env.Backend.ValidLabels()
+ labels := env.Backend.Bugs().ValidLabels()
completions = make([]string, len(labels))
for i, label := range labels {
if strings.Contains(label.String(), " ") {
@@ -300,14 +300,14 @@ func User(env *execenv.Env) ValidArgsFunction {
_ = env.Backend.Close()
}()
- ids := env.Backend.AllIdentityIds()
+ ids := env.Backend.Identities().AllIds()
completions = make([]string, len(ids))
for i, id := range ids {
- user, err := env.Backend.ResolveIdentityExcerpt(id)
+ user, err := env.Backend.Identities().ResolveExcerpt(id)
if err != nil {
return handleError(err)
}
- completions[i] = user.Id.Human() + "\t" + user.DisplayName()
+ completions[i] = user.Id().Human() + "\t" + user.DisplayName()
}
return completions, cobra.ShellCompDirectiveNoFileComp
}
@@ -322,10 +322,10 @@ func UserForQuery(env *execenv.Env) ValidArgsFunction {
_ = env.Backend.Close()
}()
- ids := env.Backend.AllIdentityIds()
+ ids := env.Backend.Identities().AllIds()
completions = make([]string, len(ids))
for i, id := range ids {
- user, err := env.Backend.ResolveIdentityExcerpt(id)
+ user, err := env.Backend.Identities().ResolveExcerpt(id)
if err != nil {
return handleError(err)
}
diff --git a/commands/execenv/env.go b/commands/execenv/env.go
index a63f835a..4c6ce204 100644
--- a/commands/execenv/env.go
+++ b/commands/execenv/env.go
@@ -128,11 +128,28 @@ func LoadBackend(env *Env) func(*cobra.Command, []string) error {
return err
}
- env.Backend, err = cache.NewRepoCache(env.Repo)
+ var events chan cache.BuildEvent
+ env.Backend, events, err = cache.NewRepoCache(env.Repo)
if err != nil {
return err
}
+ if events != nil {
+ env.Err.Println("Building cache... ")
+ for event := range events {
+ if event.Err != nil {
+ env.Err.Printf("Cache building error [%s]: %v\n", event.Typename, event.Err)
+ continue
+ }
+ switch event.Event {
+ case cache.BuildEventStarted:
+ env.Err.Printf("[%s] started\n", event.Typename)
+ case cache.BuildEventFinished:
+ env.Err.Printf("[%s] done\n", event.Typename)
+ }
+ }
+ }
+
cleaner := func(env *Env) interrupt.CleanerFunc {
return func() error {
if env.Backend != nil {
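NewRepoCache now returns a BuildEvent channel alongside the cache; when a rebuild is triggered the channel must be drained before the backend is used, as the loop above does. A minimal sketch for callers that don't need progress reporting (tests use NewRepoCacheNoEvents instead):

    // Sketch only: drain build events silently and surface the first error.
    func openBackend(repo repository.ClockedRepo) (*cache.RepoCache, error) {
        backend, events, err := cache.NewRepoCache(repo)
        if err != nil {
            return nil, err
        }
        if events != nil { // same guard as in LoadBackend above
            for event := range events {
                if event.Err != nil {
                    return nil, event.Err
                }
            }
        }
        return backend, nil
    }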
diff --git a/commands/execenv/env_testing.go b/commands/execenv/env_testing.go
index 7d9fbd60..5761b410 100644
--- a/commands/execenv/env_testing.go
+++ b/commands/execenv/env_testing.go
@@ -5,9 +5,10 @@ import (
"fmt"
"testing"
+ "github.com/stretchr/testify/require"
+
"github.com/MichaelMure/git-bug/cache"
"github.com/MichaelMure/git-bug/repository"
- "github.com/stretchr/testify/require"
)
type TestOut struct {
@@ -33,8 +34,9 @@ func NewTestEnv(t *testing.T) *Env {
buf := new(bytes.Buffer)
- backend, err := cache.NewRepoCache(repo)
+ backend, err := cache.NewRepoCacheNoEvents(repo)
require.NoError(t, err)
+
t.Cleanup(func() {
backend.Close()
})
diff --git a/commands/label.go b/commands/label.go
index 70090d26..08b9e31f 100644
--- a/commands/label.go
+++ b/commands/label.go
@@ -25,7 +25,7 @@ Note: in the future, a proper label policy could be implemented where valid labe
}
func runLabel(env *execenv.Env) error {
- labels := env.Backend.ValidLabels()
+ labels := env.Backend.Bugs().ValidLabels()
for _, l := range labels {
env.Out.Println(l)
diff --git a/commands/root.go b/commands/root.go
index b28b77b8..d98f9448 100644
--- a/commands/root.go
+++ b/commands/root.go
@@ -61,7 +61,7 @@ the same git remote you are already using to collaborate with other people.
const remoteGroup = "remote"
cmd.AddGroup(&cobra.Group{ID: entityGroup, Title: "Entities"})
- cmd.AddGroup(&cobra.Group{ID: uiGroup, Title: "User interfaces"})
+ cmd.AddGroup(&cobra.Group{ID: uiGroup, Title: "Interactive interfaces"})
cmd.AddGroup(&cobra.Group{ID: remoteGroup, Title: "Interaction with the outside world"})
addCmdWithGroup := func(child *cobra.Command, groupID string) {
diff --git a/commands/user/user.go b/commands/user/user.go
index 191fb828..9a1e477c 100644
--- a/commands/user/user.go
+++ b/commands/user/user.go
@@ -46,10 +46,10 @@ func NewUserCommand() *cobra.Command {
}
func runUser(env *execenv.Env, opts userOptions) error {
- ids := env.Backend.AllIdentityIds()
+ ids := env.Backend.Identities().AllIds()
var users []*cache.IdentityExcerpt
for _, id := range ids {
- user, err := env.Backend.ResolveIdentityExcerpt(id)
+ user, err := env.Backend.Identities().ResolveExcerpt(id)
if err != nil {
return err
}
@@ -69,7 +69,7 @@ func runUser(env *execenv.Env, opts userOptions) error {
func userDefaultFormatter(env *execenv.Env, users []*cache.IdentityExcerpt) error {
for _, user := range users {
env.Out.Printf("%s %s\n",
- colors.Cyan(user.Id.Human()),
+ colors.Cyan(user.Id().Human()),
user.DisplayName(),
)
}
diff --git a/commands/user/user_adopt.go b/commands/user/user_adopt.go
index f5944053..30fdb442 100644
--- a/commands/user/user_adopt.go
+++ b/commands/user/user_adopt.go
@@ -27,7 +27,7 @@ func newUserAdoptCommand() *cobra.Command {
func runUserAdopt(env *execenv.Env, args []string) error {
prefix := args[0]
- i, err := env.Backend.ResolveIdentityPrefix(prefix)
+ i, err := env.Backend.Identities().ResolvePrefix(prefix)
if err != nil {
return err
}
diff --git a/commands/user/user_new.go b/commands/user/user_new.go
index d7224512..7b287492 100644
--- a/commands/user/user_new.go
+++ b/commands/user/user_new.go
@@ -69,7 +69,7 @@ func runUserNew(env *execenv.Env, opts userNewOptions) error {
}
}
- id, err := env.Backend.NewIdentityRaw(opts.name, opts.email, "", opts.avatarURL, nil, nil)
+ id, err := env.Backend.Identities().NewRaw(opts.name, opts.email, "", opts.avatarURL, nil, nil)
if err != nil {
return err
}
diff --git a/commands/user/user_show.go b/commands/user/user_show.go
index 36c09e8e..225d0ef4 100644
--- a/commands/user/user_show.go
+++ b/commands/user/user_show.go
@@ -49,7 +49,7 @@ func runUserShow(env *execenv.Env, opts userShowOptions, args []string) error {
var id *cache.IdentityCache
var err error
if len(args) == 1 {
- id, err = env.Backend.ResolveIdentityPrefix(args[0])
+ id, err = env.Backend.Identities().ResolvePrefix(args[0])
} else {
id, err = env.Backend.GetUserIdentity()
}
diff --git a/commands/webui.go b/commands/webui.go
index a2a01645..ea9fde0c 100644
--- a/commands/webui.go
+++ b/commands/webui.go
@@ -106,11 +106,27 @@ func runWebUI(env *execenv.Env, opts webUIOptions) error {
}
mrc := cache.NewMultiRepoCache()
- _, err := mrc.RegisterDefaultRepository(env.Repo)
+ _, events, err := mrc.RegisterDefaultRepository(env.Repo)
if err != nil {
return err
}
+ if events != nil {
+ env.Err.Println("Building cache... ")
+ for event := range events {
+ if event.Err != nil {
+ env.Err.Printf("Cache building error [%s]: %v\n", event.Typename, event.Err)
+ continue
+ }
+ switch event.Event {
+ case cache.BuildEventStarted:
+ env.Err.Printf("[%s] started\n", event.Typename)
+ case cache.BuildEventFinished:
+ env.Err.Printf("[%s] done\n", event.Typename)
+ }
+ }
+ }
+
var errOut io.Writer
if opts.logErrors {
errOut = env.Err
diff --git a/doc/README.md b/doc/README.md
index cf9fd845..e172ffd6 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -2,14 +2,14 @@
## For users
-- [data model](model.md) describe how the data model works and why.
-- [query language](queries.md) describe git-bug's query language.
+- [data model](model.md) describes how the data model works and why.
+- [query language](queries.md) describes git-bug's query language.
- [How-to: Read and edit offline your Github/Gitlab/Jira issues with git-bug](howto-github.md)
## For developers
-- :exclamation: [data model](model.md) describe how the data model works and why.
+- :exclamation: [data model](model.md) describes how the data model works and why.
- :exclamation: [internal bird-view](architecture.md) gives an overview of the project architecture.
-- :exclamation: [Entity/DAG](../entity/dag/example_test.go) explain how to easily make your own distributed entity in git.
-- [query language](queries.md) describe git-bug's query language.
-- [JIRA bridge de v notes](jira_bridge.md) \ No newline at end of file
+- :exclamation: [Entity/DAG](../entity/dag/example_test.go) explains how to easily make your own distributed entity in git.
+- [query language](queries.md) describes git-bug's query language.
+- [JIRA bridge dev notes](jira_bridge.md)
diff --git a/entities/bug/bug.go b/entities/bug/bug.go
index b0f46c0b..deb00c7c 100644
--- a/entities/bug/bug.go
+++ b/entities/bug/bug.go
@@ -20,9 +20,12 @@ var _ entity.Interface = &Bug{}
// 4: with DAG entity framework
const formatVersion = 4
+const Typename = "bug"
+const Namespace = "bugs"
+
var def = dag.Definition{
- Typename: "bug",
- Namespace: "bugs",
+ Typename: Typename,
+ Namespace: Namespace,
OperationUnmarshaler: operationUnmarshaler,
FormatVersion: formatVersion,
}
@@ -42,9 +45,11 @@ type Bug struct {
// NewBug create a new Bug
func NewBug() *Bug {
- return &Bug{
- Entity: dag.New(def),
- }
+ return wrapper(dag.New(def))
+}
+
+func wrapper(e *dag.Entity) *Bug {
+ return &Bug{Entity: e}
}
func simpleResolvers(repo repository.ClockedRepo) entity.Resolvers {
@@ -60,49 +65,17 @@ func Read(repo repository.ClockedRepo, id entity.Id) (*Bug, error) {
// ReadWithResolver will read a bug from its Id, with custom resolvers
func ReadWithResolver(repo repository.ClockedRepo, resolvers entity.Resolvers, id entity.Id) (*Bug, error) {
- e, err := dag.Read(def, repo, resolvers, id)
- if err != nil {
- return nil, err
- }
- return &Bug{Entity: e}, nil
-}
-
-type StreamedBug struct {
- Bug *Bug
- Err error
+ return dag.Read(def, wrapper, repo, resolvers, id)
}
// ReadAll read and parse all local bugs
-func ReadAll(repo repository.ClockedRepo) <-chan StreamedBug {
- return readAll(repo, simpleResolvers(repo))
+func ReadAll(repo repository.ClockedRepo) <-chan entity.StreamedEntity[*Bug] {
+ return dag.ReadAll(def, wrapper, repo, simpleResolvers(repo))
}
// ReadAllWithResolver read and parse all local bugs
-func ReadAllWithResolver(repo repository.ClockedRepo, resolvers entity.Resolvers) <-chan StreamedBug {
- return readAll(repo, resolvers)
-}
-
-// Read and parse all available bug with a given ref prefix
-func readAll(repo repository.ClockedRepo, resolvers entity.Resolvers) <-chan StreamedBug {
- out := make(chan StreamedBug)
-
- go func() {
- defer close(out)
-
- for streamedEntity := range dag.ReadAll(def, repo, resolvers) {
- if streamedEntity.Err != nil {
- out <- StreamedBug{
- Err: streamedEntity.Err,
- }
- } else {
- out <- StreamedBug{
- Bug: &Bug{Entity: streamedEntity.Entity},
- }
- }
- }
- }()
-
- return out
+func ReadAllWithResolver(repo repository.ClockedRepo, resolvers entity.Resolvers) <-chan entity.StreamedEntity[*Bug] {
+ return dag.ReadAll(def, wrapper, repo, resolvers)
}
// ListLocalIds list all the available local bug ids
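The Read/ReadAll/MergeAll helpers in entity/dag now take a wrapper function and hand back the concrete type directly, which is what removes the StreamedBug type and the channel-wrapping boilerplate deleted above. The same pattern would apply to any other dag-based entity, roughly as sketched (Board, boardDef and wrapBoard are hypothetical):

    // Sketch only: the wrapper pattern applied to a hypothetical dag entity.
    type Board struct {
        *dag.Entity
    }

    func wrapBoard(e *dag.Entity) *Board { return &Board{Entity: e} }

    func ReadBoard(repo repository.ClockedRepo, resolvers entity.Resolvers, id entity.Id) (*Board, error) {
        return dag.Read(boardDef, wrapBoard, repo, resolvers, id) // boardDef: a dag.Definition like def above
    }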
diff --git a/entities/bug/bug_actions.go b/entities/bug/bug_actions.go
index c25b9243..198e4ed0 100644
--- a/entities/bug/bug_actions.go
+++ b/entities/bug/bug_actions.go
@@ -23,33 +23,14 @@ func Push(repo repository.Repo, remote string) (string, error) {
// Note: an author is necessary for the case where a merge commit is created, as this commit will
// have an author and may be signed if a signing key is available.
func Pull(repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, mergeAuthor identity.Interface) error {
- return dag.Pull(def, repo, resolvers, remote, mergeAuthor)
+ return dag.Pull(def, wrapper, repo, resolvers, remote, mergeAuthor)
}
// MergeAll will merge all the available remote bug
// Note: an author is necessary for the case where a merge commit is created, as this commit will
// have an author and may be signed if a signing key is available.
func MergeAll(repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, mergeAuthor identity.Interface) <-chan entity.MergeResult {
- out := make(chan entity.MergeResult)
-
- go func() {
- defer close(out)
-
- results := dag.MergeAll(def, repo, resolvers, remote, mergeAuthor)
-
- // wrap the dag.Entity into a complete Bug
- for result := range results {
- result := result
- if result.Entity != nil {
- result.Entity = &Bug{
- Entity: result.Entity.(*dag.Entity),
- }
- }
- out <- result
- }
- }()
-
- return out
+ return dag.MergeAll(def, wrapper, repo, resolvers, remote, mergeAuthor)
}
// Remove will remove a local bug from its entity.Id
diff --git a/entities/bug/err.go b/entities/bug/err.go
deleted file mode 100644
index 1bd174bb..00000000
--- a/entities/bug/err.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package bug
-
-import (
- "errors"
-
- "github.com/MichaelMure/git-bug/entity"
-)
-
-var ErrBugNotExist = errors.New("bug doesn't exist")
-
-func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch {
- return entity.NewErrMultipleMatch("bug", matching)
-}
-
-func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch {
- return entity.NewErrMultipleMatch("operation", matching)
-}
diff --git a/entities/bug/operation.go b/entities/bug/operation.go
index 41d80700..04365046 100644
--- a/entities/bug/operation.go
+++ b/entities/bug/operation.go
@@ -21,12 +21,7 @@ const (
)
// Operation define the interface to fulfill for an edit operation of a Bug
-type Operation interface {
- dag.Operation
-
- // Apply the operation to a Snapshot to create the final state
- Apply(snapshot *Snapshot)
-}
+type Operation = dag.OperationWithApply[*Snapshot]
// make sure that package external operations do conform to our interface
var _ Operation = &dag.NoOpOperation[*Snapshot]{}
diff --git a/entities/bug/resolver.go b/entities/bug/resolver.go
index e7beb0e4..b0a05917 100644
--- a/entities/bug/resolver.go
+++ b/entities/bug/resolver.go
@@ -16,6 +16,6 @@ func NewSimpleResolver(repo repository.ClockedRepo) *SimpleResolver {
return &SimpleResolver{repo: repo}
}
-func (r *SimpleResolver) Resolve(id entity.Id) (entity.Interface, error) {
+func (r *SimpleResolver) Resolve(id entity.Id) (entity.Resolved, error) {
return Read(r.repo, id)
}
diff --git a/entities/bug/snapshot.go b/entities/bug/snapshot.go
index 333fe207..5c260d85 100644
--- a/entities/bug/snapshot.go
+++ b/entities/bug/snapshot.go
@@ -43,6 +43,10 @@ func (snap *Snapshot) AllOperations() []dag.Operation {
return snap.Operations
}
+func (snap *Snapshot) AppendOperation(op dag.Operation) {
+ snap.Operations = append(snap.Operations, op)
+}
+
// EditTime returns the last time a bug was modified
func (snap *Snapshot) EditTime() time.Time {
if len(snap.Operations) == 0 {
diff --git a/entities/bug/with_snapshot.go b/entities/bug/with_snapshot.go
deleted file mode 100644
index 0474cac7..00000000
--- a/entities/bug/with_snapshot.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package bug
-
-import (
- "github.com/MichaelMure/git-bug/repository"
-)
-
-var _ Interface = &WithSnapshot{}
-
-// WithSnapshot encapsulate a Bug and maintain the corresponding Snapshot efficiently
-type WithSnapshot struct {
- *Bug
- snap *Snapshot
-}
-
-func (b *WithSnapshot) Compile() *Snapshot {
- if b.snap == nil {
- snap := b.Bug.Compile()
- b.snap = snap
- }
- return b.snap
-}
-
-// Append intercept Bug.Append() to update the snapshot efficiently
-func (b *WithSnapshot) Append(op Operation) {
- b.Bug.Append(op)
-
- if b.snap == nil {
- return
- }
-
- op.Apply(b.snap)
- b.snap.Operations = append(b.snap.Operations, op)
-}
-
-// Commit intercept Bug.Commit() to update the snapshot efficiently
-func (b *WithSnapshot) Commit(repo repository.ClockedRepo) error {
- err := b.Bug.Commit(repo)
-
- if err != nil {
- b.snap = nil
- return err
- }
-
- // Commit() shouldn't change anything of the bug state apart from the
- // initial ID set
-
- if b.snap == nil {
- return nil
- }
-
- b.snap.id = b.Bug.Id()
- return nil
-}
diff --git a/entities/identity/common.go b/entities/identity/common.go
index 5c6445e9..88e30e33 100644
--- a/entities/identity/common.go
+++ b/entities/identity/common.go
@@ -2,18 +2,9 @@ package identity
import (
"encoding/json"
- "errors"
"fmt"
-
- "github.com/MichaelMure/git-bug/entity"
)
-var ErrIdentityNotExist = errors.New("identity doesn't exist")
-
-func NewErrMultipleMatch(matching []entity.Id) *entity.ErrMultipleMatch {
- return entity.NewErrMultipleMatch("identity", matching)
-}
-
// Custom unmarshaling function to allow package user to delegate
// the decoding of an Identity and distinguish between an Identity
// and a Bare.
diff --git a/entities/identity/identity.go b/entities/identity/identity.go
index d497dbcc..b0cee43b 100644
--- a/entities/identity/identity.go
+++ b/entities/identity/identity.go
@@ -19,16 +19,15 @@ const identityRemoteRefPattern = "refs/remotes/%s/identities/"
const versionEntryName = "version"
const identityConfigKey = "git-bug.identity"
+const Typename = "identity"
+const Namespace = "identities"
+
var ErrNonFastForwardMerge = errors.New("non fast-forward identity merge")
var ErrNoIdentitySet = errors.New("No identity is set.\n" +
"To interact with bugs, an identity first needs to be created using " +
"\"git bug user new\" or adopted with \"git bug user adopt\"")
var ErrMultipleIdentitiesSet = errors.New("multiple user identities set")
-func NewErrMultipleMatchIdentity(matching []entity.Id) *entity.ErrMultipleMatch {
- return entity.NewErrMultipleMatch("identity", matching)
-}
-
var _ Interface = &Identity{}
var _ entity.Interface = &Identity{}
@@ -109,7 +108,7 @@ func read(repo repository.Repo, ref string) (*Identity, error) {
hashes, err := repo.ListCommits(ref)
if err != nil {
- return nil, ErrIdentityNotExist
+ return nil, entity.NewErrNotFound(Typename)
}
if len(hashes) == 0 {
return nil, fmt.Errorf("empty identity")
@@ -174,7 +173,7 @@ func RemoveIdentity(repo repository.ClockedRepo, id entity.Id) error {
return err
}
if len(refs) > 1 {
- return NewErrMultipleMatchIdentity(entity.RefsToIds(refs))
+ return entity.NewErrMultipleMatch(Typename, entity.RefsToIds(refs))
}
if len(refs) == 1 {
// we have the identity locally
@@ -193,7 +192,7 @@ func RemoveIdentity(repo repository.ClockedRepo, id entity.Id) error {
return err
}
if len(remoteRefs) > 1 {
- return NewErrMultipleMatchIdentity(entity.RefsToIds(refs))
+ return entity.NewErrMultipleMatch(Typename, entity.RefsToIds(refs))
}
if len(remoteRefs) == 1 {
// found the identity in a remote
@@ -202,7 +201,7 @@ func RemoveIdentity(repo repository.ClockedRepo, id entity.Id) error {
}
if len(fullMatches) == 0 {
- return ErrIdentityNotExist
+ return entity.NewErrNotFound(Typename)
}
for _, ref := range fullMatches {
@@ -215,44 +214,39 @@ func RemoveIdentity(repo repository.ClockedRepo, id entity.Id) error {
return nil
}
-type StreamedIdentity struct {
- Identity *Identity
- Err error
-}
-
// ReadAllLocal read and parse all local Identity
-func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedIdentity {
+func ReadAllLocal(repo repository.ClockedRepo) <-chan entity.StreamedEntity[*Identity] {
return readAll(repo, identityRefPattern)
}
// ReadAllRemote read and parse all remote Identity for a given remote
-func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedIdentity {
+func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan entity.StreamedEntity[*Identity] {
refPrefix := fmt.Sprintf(identityRemoteRefPattern, remote)
return readAll(repo, refPrefix)
}
// readAll read and parse all available bug with a given ref prefix
-func readAll(repo repository.ClockedRepo, refPrefix string) <-chan StreamedIdentity {
- out := make(chan StreamedIdentity)
+func readAll(repo repository.ClockedRepo, refPrefix string) <-chan entity.StreamedEntity[*Identity] {
+ out := make(chan entity.StreamedEntity[*Identity])
go func() {
defer close(out)
refs, err := repo.ListRefs(refPrefix)
if err != nil {
- out <- StreamedIdentity{Err: err}
+ out <- entity.StreamedEntity[*Identity]{Err: err}
return
}
for _, ref := range refs {
- b, err := read(repo, ref)
+ i, err := read(repo, ref)
if err != nil {
- out <- StreamedIdentity{Err: err}
+ out <- entity.StreamedEntity[*Identity]{Err: err}
return
}
- out <- StreamedIdentity{Identity: b}
+ out <- entity.StreamedEntity[*Identity]{Entity: i}
}
}()
@@ -308,7 +302,7 @@ func (i *Identity) Mutate(repo repository.RepoClock, f func(orig *Mutator)) erro
return nil
}
-// Write the identity into the Repository. In particular, this ensure that
+// Commit writes the identity into the Repository. In particular, this ensures that
// the Id is properly set.
func (i *Identity) Commit(repo repository.ClockedRepo) error {
if !i.NeedCommit() {
diff --git a/entities/identity/identity_actions.go b/entities/identity/identity_actions.go
index b58bb2d9..13776078 100644
--- a/entities/identity/identity_actions.go
+++ b/entities/identity/identity_actions.go
@@ -13,12 +13,12 @@ import (
// Fetch retrieve updates from a remote
// This does not change the local identities state
func Fetch(repo repository.Repo, remote string) (string, error) {
- return repo.FetchRefs(remote, "identities")
+ return repo.FetchRefs(remote, Namespace)
}
// Push update a remote with the local changes
func Push(repo repository.Repo, remote string) (string, error) {
- return repo.PushRefs(remote, "identities")
+ return repo.PushRefs(remote, Namespace)
}
// Pull will do a Fetch + MergeAll
diff --git a/entities/identity/identity_actions_test.go b/entities/identity/identity_actions_test.go
index 351fb7a4..e9626cb9 100644
--- a/entities/identity/identity_actions_test.go
+++ b/entities/identity/identity_actions_test.go
@@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require"
+ "github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/repository"
)
@@ -145,13 +146,13 @@ func TestIdentityPushPull(t *testing.T) {
}
}
-func allIdentities(t testing.TB, identities <-chan StreamedIdentity) []*Identity {
+func allIdentities(t testing.TB, identities <-chan entity.StreamedEntity[*Identity]) []*Identity {
var result []*Identity
for streamed := range identities {
if streamed.Err != nil {
t.Fatal(streamed.Err)
}
- result = append(result, streamed.Identity)
+ result = append(result, streamed.Entity)
}
return result
}
diff --git a/entities/identity/identity_test.go b/entities/identity/identity_test.go
index f0c3bbe9..0ecc8058 100644
--- a/entities/identity/identity_test.go
+++ b/entities/identity/identity_test.go
@@ -6,6 +6,7 @@ import (
"github.com/stretchr/testify/require"
+ "github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/repository"
"github.com/MichaelMure/git-bug/util/lamport"
)
@@ -278,13 +279,13 @@ func TestIdentityRemove(t *testing.T) {
require.NoError(t, err)
_, err = ReadLocal(repo, rene.Id())
- require.Error(t, ErrIdentityNotExist, err)
+ require.ErrorAs(t, entity.ErrNotFound{}, err)
_, err = ReadRemote(repo, "remoteA", string(rene.Id()))
- require.Error(t, ErrIdentityNotExist, err)
+ require.ErrorAs(t, entity.ErrNotFound{}, err)
_, err = ReadRemote(repo, "remoteB", string(rene.Id()))
- require.Error(t, ErrIdentityNotExist, err)
+ require.ErrorAs(t, entity.ErrNotFound{}, err)
ids, err := ListLocalIds(repo)
require.NoError(t, err)
diff --git a/entities/identity/identity_user.go b/entities/identity/identity_user.go
index cbbb8974..7eb374d4 100644
--- a/entities/identity/identity_user.go
+++ b/entities/identity/identity_user.go
@@ -23,7 +23,7 @@ func GetUserIdentity(repo repository.Repo) (*Identity, error) {
}
i, err := ReadLocal(repo, id)
- if err == ErrIdentityNotExist {
+ if entity.IsErrNotFound(err) {
innerErr := repo.LocalConfig().RemoveAll(identityConfigKey)
if innerErr != nil {
_, _ = fmt.Fprintln(os.Stderr, errors.Wrap(innerErr, "can't clear user identity").Error())
diff --git a/entities/identity/resolver.go b/entities/identity/resolver.go
index 5468a8f8..a4b676f3 100644
--- a/entities/identity/resolver.go
+++ b/entities/identity/resolver.go
@@ -16,19 +16,6 @@ func NewSimpleResolver(repo repository.Repo) *SimpleResolver {
return &SimpleResolver{repo: repo}
}
-func (r *SimpleResolver) Resolve(id entity.Id) (entity.Interface, error) {
+func (r *SimpleResolver) Resolve(id entity.Id) (entity.Resolved, error) {
return ReadLocal(r.repo, id)
}
-
-var _ entity.Resolver = &StubResolver{}
-
-// StubResolver is a Resolver that doesn't load anything, only returning IdentityStub instances
-type StubResolver struct{}
-
-func NewStubResolver() *StubResolver {
- return &StubResolver{}
-}
-
-func (s *StubResolver) Resolve(id entity.Id) (entity.Interface, error) {
- return &IdentityStub{id: id}, nil
-}
diff --git a/entity/dag/common_test.go b/entity/dag/common_test.go
index f78b09e9..51acfa49 100644
--- a/entity/dag/common_test.go
+++ b/entity/dag/common_test.go
@@ -88,6 +88,18 @@ func unmarshaler(raw json.RawMessage, resolvers entity.Resolvers) (Operation, er
}
/*
+ Entity
+*/
+
+type Foo struct {
+ *Entity
+}
+
+func wrapper(e *Entity) *Foo {
+ return &Foo{Entity: e}
+}
+
+/*
Identities + repo + definition
*/
diff --git a/entity/dag/entity.go b/entity/dag/entity.go
index ca674ad7..2028e1b4 100644
--- a/entity/dag/entity.go
+++ b/entity/dag/entity.go
@@ -59,32 +59,35 @@ func New(definition Definition) *Entity {
}
// Read will read and decode a stored local Entity from a repository
-func Read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolvers, id entity.Id) (*Entity, error) {
+func Read[EntityT entity.Interface](def Definition, wrapper func(e *Entity) EntityT, repo repository.ClockedRepo, resolvers entity.Resolvers, id entity.Id) (EntityT, error) {
if err := id.Validate(); err != nil {
- return nil, errors.Wrap(err, "invalid id")
+ return *new(EntityT), errors.Wrap(err, "invalid id")
}
ref := fmt.Sprintf("refs/%s/%s", def.Namespace, id.String())
- return read(def, repo, resolvers, ref)
+ return read[EntityT](def, wrapper, repo, resolvers, ref)
}
// readRemote will read and decode a stored remote Entity from a repository
-func readRemote(def Definition, repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, id entity.Id) (*Entity, error) {
+func readRemote[EntityT entity.Interface](def Definition, wrapper func(e *Entity) EntityT, repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, id entity.Id) (EntityT, error) {
if err := id.Validate(); err != nil {
- return nil, errors.Wrap(err, "invalid id")
+ return *new(EntityT), errors.Wrap(err, "invalid id")
}
ref := fmt.Sprintf("refs/remotes/%s/%s/%s", def.Namespace, remote, id.String())
- return read(def, repo, resolvers, ref)
+ return read[EntityT](def, wrapper, repo, resolvers, ref)
}
// read fetch from git and decode an Entity at an arbitrary git reference.
-func read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolvers, ref string) (*Entity, error) {
+func read[EntityT entity.Interface](def Definition, wrapper func(e *Entity) EntityT, repo repository.ClockedRepo, resolvers entity.Resolvers, ref string) (EntityT, error) {
rootHash, err := repo.ResolveRef(ref)
+ if err == repository.ErrNotFound {
+ return *new(EntityT), entity.NewErrNotFound(def.Typename)
+ }
if err != nil {
- return nil, err
+ return *new(EntityT), err
}
// Perform a breadth-first search to get a topological order of the DAG where we discover the
@@ -104,7 +107,7 @@ func read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolver
commit, err := repo.ReadCommit(hash)
if err != nil {
- return nil, err
+ return *new(EntityT), err
}
BFSOrder = append(BFSOrder, commit)
@@ -137,26 +140,26 @@ func read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolver
// can have no parents. Said otherwise, the DAG need to have exactly
// one leaf.
if !isFirstCommit && len(commit.Parents) == 0 {
- return nil, fmt.Errorf("multiple leafs in the entity DAG")
+ return *new(EntityT), fmt.Errorf("multiple leafs in the entity DAG")
}
opp, err := readOperationPack(def, repo, resolvers, commit)
if err != nil {
- return nil, err
+ return *new(EntityT), err
}
err = opp.Validate()
if err != nil {
- return nil, err
+ return *new(EntityT), err
}
if isMerge && len(opp.Operations) > 0 {
- return nil, fmt.Errorf("merge commit cannot have operations")
+ return *new(EntityT), fmt.Errorf("merge commit cannot have operations")
}
// Check that the create lamport clock is set (not checked in Validate() as it's optional)
if isFirstCommit && opp.CreateTime <= 0 {
- return nil, fmt.Errorf("creation lamport time not set")
+ return *new(EntityT), fmt.Errorf("creation lamport time not set")
}
// make sure that the lamport clocks causality match the DAG topology
@@ -167,7 +170,7 @@ func read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolver
}
if parentPack.EditTime >= opp.EditTime {
- return nil, fmt.Errorf("lamport clock ordering doesn't match the DAG")
+ return *new(EntityT), fmt.Errorf("lamport clock ordering doesn't match the DAG")
}
// to avoid an attack where clocks are pushed toward the uint64 rollover, make sure
@@ -175,7 +178,7 @@ func read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolver
// we ignore merge commits here to allow merging after a loooong time without breaking anything,
// as long as there is one valid chain of small hops, it's fine.
if !isMerge && opp.EditTime-parentPack.EditTime > 1_000_000 {
- return nil, fmt.Errorf("lamport clock jumping too far in the future, likely an attack")
+ return *new(EntityT), fmt.Errorf("lamport clock jumping too far in the future, likely an attack")
}
}
@@ -187,11 +190,11 @@ func read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolver
for _, opp := range oppMap {
err = repo.Witness(fmt.Sprintf(creationClockPattern, def.Namespace), opp.CreateTime)
if err != nil {
- return nil, err
+ return *new(EntityT), err
}
err = repo.Witness(fmt.Sprintf(editClockPattern, def.Namespace), opp.EditTime)
if err != nil {
- return nil, err
+ return *new(EntityT), err
}
}
@@ -232,13 +235,13 @@ func read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolver
}
}
- return &Entity{
+ return wrapper(&Entity{
Definition: def,
ops: ops,
lastCommit: rootHash,
createTime: createTime,
editTime: editTime,
- }, nil
+ }), nil
}
// readClockNoCheck fetch from git, read and witness the clocks of an Entity at an arbitrary git reference.
@@ -247,6 +250,9 @@ func read(def Definition, repo repository.ClockedRepo, resolvers entity.Resolver
// operation blobs can be implemented instead.
func readClockNoCheck(def Definition, repo repository.ClockedRepo, ref string) error {
rootHash, err := repo.ResolveRef(ref)
+ if err == repository.ErrNotFound {
+ return entity.NewErrNotFound(def.Typename)
+ }
if err != nil {
return err
}
@@ -293,14 +299,9 @@ func readClockNoCheck(def Definition, repo repository.ClockedRepo, ref string) e
return nil
}
-type StreamedEntity struct {
- Entity *Entity
- Err error
-}
-
// ReadAll read and parse all local Entity
-func ReadAll(def Definition, repo repository.ClockedRepo, resolvers entity.Resolvers) <-chan StreamedEntity {
- out := make(chan StreamedEntity)
+func ReadAll[EntityT entity.Interface](def Definition, wrapper func(e *Entity) EntityT, repo repository.ClockedRepo, resolvers entity.Resolvers) <-chan entity.StreamedEntity[EntityT] {
+ out := make(chan entity.StreamedEntity[EntityT])
go func() {
defer close(out)
@@ -309,19 +310,19 @@ func ReadAll(def Definition, repo repository.ClockedRepo, resolvers entity.Resol
refs, err := repo.ListRefs(refPrefix)
if err != nil {
- out <- StreamedEntity{Err: err}
+ out <- entity.StreamedEntity[EntityT]{Err: err}
return
}
for _, ref := range refs {
- e, err := read(def, repo, resolvers, ref)
+ e, err := read[EntityT](def, wrapper, repo, resolvers, ref)
if err != nil {
- out <- StreamedEntity{Err: err}
+ out <- entity.StreamedEntity[EntityT]{Err: err}
return
}
- out <- StreamedEntity{Entity: e}
+ out <- entity.StreamedEntity[EntityT]{Entity: e}
}
}()
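The wrapper argument threaded through Read, readRemote, read and ReadAll is what lets these generic functions hand back the concrete entity type directly. A sketch of the pattern for a hypothetical Vehicle entity (def being the package's dag.Definition):

package vehicle

import (
	"github.com/MichaelMure/git-bug/entity"
	"github.com/MichaelMure/git-bug/entity/dag"
	"github.com/MichaelMure/git-bug/repository"
)

// Vehicle wraps the generic dag.Entity with domain-specific behavior.
type Vehicle struct {
	*dag.Entity
}

// wrapper tells the generic dag functions how to turn a *dag.Entity into a *Vehicle.
func wrapper(e *dag.Entity) *Vehicle {
	return &Vehicle{Entity: e}
}

// Read loads one Vehicle; dag.Read infers EntityT from wrapper, so no type assertion is needed.
func Read(def dag.Definition, repo repository.ClockedRepo, resolvers entity.Resolvers, id entity.Id) (*Vehicle, error) {
	return dag.Read(def, wrapper, repo, resolvers, id)
}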
diff --git a/entity/dag/entity_actions.go b/entity/dag/entity_actions.go
index c971f316..2a2bf87f 100644
--- a/entity/dag/entity_actions.go
+++ b/entity/dag/entity_actions.go
@@ -32,13 +32,13 @@ func Push(def Definition, repo repository.Repo, remote string) (string, error) {
// Pull will do a Fetch + MergeAll
// Contrary to MergeAll, this function will return an error if a merge fail.
-func Pull(def Definition, repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, author identity.Interface) error {
+func Pull[EntityT entity.Interface](def Definition, wrapper func(e *Entity) EntityT, repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, author identity.Interface) error {
_, err := Fetch(def, repo, remote)
if err != nil {
return err
}
- for merge := range MergeAll(def, repo, resolvers, remote, author) {
+ for merge := range MergeAll(def, wrapper, repo, resolvers, remote, author) {
if merge.Err != nil {
return merge.Err
}
@@ -68,7 +68,7 @@ func Pull(def Definition, repo repository.ClockedRepo, resolvers entity.Resolver
//
// Note: an author is necessary for the case where a merge commit is created, as this commit will
// have an author and may be signed if a signing key is available.
-func MergeAll(def Definition, repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, author identity.Interface) <-chan entity.MergeResult {
+func MergeAll[EntityT entity.Interface](def Definition, wrapper func(e *Entity) EntityT, repo repository.ClockedRepo, resolvers entity.Resolvers, remote string, author identity.Interface) <-chan entity.MergeResult {
out := make(chan entity.MergeResult)
go func() {
@@ -82,7 +82,7 @@ func MergeAll(def Definition, repo repository.ClockedRepo, resolvers entity.Reso
}
for _, remoteRef := range remoteRefs {
- out <- merge(def, repo, resolvers, remoteRef, author)
+ out <- merge[EntityT](def, wrapper, repo, resolvers, remoteRef, author)
}
}()
@@ -91,14 +91,14 @@ func MergeAll(def Definition, repo repository.ClockedRepo, resolvers entity.Reso
// merge perform a merge to make sure a local Entity is up-to-date.
// See MergeAll for more details.
-func merge(def Definition, repo repository.ClockedRepo, resolvers entity.Resolvers, remoteRef string, author identity.Interface) entity.MergeResult {
+func merge[EntityT entity.Interface](def Definition, wrapper func(e *Entity) EntityT, repo repository.ClockedRepo, resolvers entity.Resolvers, remoteRef string, author identity.Interface) entity.MergeResult {
id := entity.RefToId(remoteRef)
if err := id.Validate(); err != nil {
return entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error())
}
- remoteEntity, err := read(def, repo, resolvers, remoteRef)
+ remoteEntity, err := read[EntityT](def, wrapper, repo, resolvers, remoteRef)
if err != nil {
return entity.NewMergeInvalidStatus(id,
errors.Wrapf(err, "remote %s is not readable", def.Typename).Error())
@@ -197,7 +197,7 @@ func merge(def Definition, repo repository.ClockedRepo, resolvers entity.Resolve
// an empty operationPack.
// First step is to collect those clocks.
- localEntity, err := read(def, repo, resolvers, localRef)
+ localEntity, err := read[EntityT](def, wrapper, repo, resolvers, localRef)
if err != nil {
return entity.NewMergeError(err, id)
}
diff --git a/entity/dag/entity_actions_test.go b/entity/dag/entity_actions_test.go
index e6888148..fd219644 100644
--- a/entity/dag/entity_actions_test.go
+++ b/entity/dag/entity_actions_test.go
@@ -11,10 +11,10 @@ import (
"github.com/MichaelMure/git-bug/repository"
)
-func allEntities(t testing.TB, bugs <-chan StreamedEntity) []*Entity {
+func allEntities(t testing.TB, bugs <-chan entity.StreamedEntity[*Foo]) []*Foo {
t.Helper()
- var result []*Entity
+ var result []*Foo
for streamed := range bugs {
require.NoError(t, streamed.Err)
@@ -36,10 +36,10 @@ func TestEntityPushPull(t *testing.T) {
_, err = Push(def, repoA, "remote")
require.NoError(t, err)
- err = Pull(def, repoB, resolvers, "remote", id1)
+ err = Pull(def, wrapper, repoB, resolvers, "remote", id1)
require.NoError(t, err)
- entities := allEntities(t, ReadAll(def, repoB, resolvers))
+ entities := allEntities(t, ReadAll(def, wrapper, repoB, resolvers))
require.Len(t, entities, 1)
// B --> remote --> A
@@ -52,10 +52,10 @@ func TestEntityPushPull(t *testing.T) {
_, err = Push(def, repoB, "remote")
require.NoError(t, err)
- err = Pull(def, repoA, resolvers, "remote", id1)
+ err = Pull(def, wrapper, repoA, resolvers, "remote", id1)
require.NoError(t, err)
- entities = allEntities(t, ReadAll(def, repoB, resolvers))
+ entities = allEntities(t, ReadAll(def, wrapper, repoB, resolvers))
require.Len(t, entities, 2)
}
@@ -85,7 +85,7 @@ func TestListLocalIds(t *testing.T) {
listLocalIds(t, def, repoA, 2)
listLocalIds(t, def, repoB, 0)
- err = Pull(def, repoB, resolvers, "remote", id1)
+ err = Pull(def, wrapper, repoB, resolvers, "remote", id1)
require.NoError(t, err)
listLocalIds(t, def, repoA, 2)
@@ -228,7 +228,7 @@ func TestMerge(t *testing.T) {
_, err = Fetch(def, repoB, "remote")
require.NoError(t, err)
- results := MergeAll(def, repoB, resolvers, "remote", id1)
+ results := MergeAll(def, wrapper, repoB, resolvers, "remote", id1)
assertMergeResults(t, []entity.MergeResult{
{
@@ -246,7 +246,7 @@ func TestMerge(t *testing.T) {
// SCENARIO 2
// if the remote and local Entity have the same state, nothing is changed
- results = MergeAll(def, repoB, resolvers, "remote", id1)
+ results = MergeAll(def, wrapper, repoB, resolvers, "remote", id1)
assertMergeResults(t, []entity.MergeResult{
{
@@ -272,7 +272,7 @@ func TestMerge(t *testing.T) {
err = e2A.Commit(repoA)
require.NoError(t, err)
- results = MergeAll(def, repoA, resolvers, "remote", id1)
+ results = MergeAll(def, wrapper, repoA, resolvers, "remote", id1)
assertMergeResults(t, []entity.MergeResult{
{
@@ -297,7 +297,7 @@ func TestMerge(t *testing.T) {
_, err = Fetch(def, repoB, "remote")
require.NoError(t, err)
- results = MergeAll(def, repoB, resolvers, "remote", id1)
+ results = MergeAll(def, wrapper, repoB, resolvers, "remote", id1)
assertMergeResults(t, []entity.MergeResult{
{
@@ -324,10 +324,10 @@ func TestMerge(t *testing.T) {
err = e2A.Commit(repoA)
require.NoError(t, err)
- e1B, err := Read(def, repoB, resolvers, e1A.Id())
+ e1B, err := Read(def, wrapper, repoB, resolvers, e1A.Id())
require.NoError(t, err)
- e2B, err := Read(def, repoB, resolvers, e2A.Id())
+ e2B, err := Read(def, wrapper, repoB, resolvers, e2A.Id())
require.NoError(t, err)
e1B.Append(newOp1(id1, "barbarfoofoo"))
@@ -344,7 +344,7 @@ func TestMerge(t *testing.T) {
_, err = Fetch(def, repoB, "remote")
require.NoError(t, err)
- results = MergeAll(def, repoB, resolvers, "remote", id1)
+ results = MergeAll(def, wrapper, repoB, resolvers, "remote", id1)
assertMergeResults(t, []entity.MergeResult{
{
@@ -365,7 +365,7 @@ func TestMerge(t *testing.T) {
_, err = Fetch(def, repoA, "remote")
require.NoError(t, err)
- results = MergeAll(def, repoA, resolvers, "remote", id1)
+ results = MergeAll(def, wrapper, repoA, resolvers, "remote", id1)
assertMergeResults(t, []entity.MergeResult{
{
@@ -396,10 +396,10 @@ func TestRemove(t *testing.T) {
err = Remove(def, repoA, e.Id())
require.NoError(t, err)
- _, err = Read(def, repoA, resolvers, e.Id())
+ _, err = Read(def, wrapper, repoA, resolvers, e.Id())
require.Error(t, err)
- _, err = readRemote(def, repoA, resolvers, "remote", e.Id())
+ _, err = readRemote(def, wrapper, repoA, resolvers, "remote", e.Id())
require.Error(t, err)
// Remove is idempotent
diff --git a/entity/dag/entity_test.go b/entity/dag/entity_test.go
index e399b6c7..c457eb21 100644
--- a/entity/dag/entity_test.go
+++ b/entity/dag/entity_test.go
@@ -9,7 +9,7 @@ import (
func TestWriteRead(t *testing.T) {
repo, id1, id2, resolver, def := makeTestContext()
- entity := New(def)
+ entity := wrapper(New(def))
require.False(t, entity.NeedCommit())
entity.Append(newOp1(id1, "foo"))
@@ -24,16 +24,16 @@ func TestWriteRead(t *testing.T) {
require.NoError(t, entity.CommitAsNeeded(repo))
require.False(t, entity.NeedCommit())
- read, err := Read(def, repo, resolver, entity.Id())
+ read, err := Read(def, wrapper, repo, resolver, entity.Id())
require.NoError(t, err)
- assertEqualEntities(t, entity, read)
+ assertEqualEntities(t, entity.Entity, read.Entity)
}
func TestWriteReadMultipleAuthor(t *testing.T) {
repo, id1, id2, resolver, def := makeTestContext()
- entity := New(def)
+ entity := wrapper(New(def))
entity.Append(newOp1(id1, "foo"))
entity.Append(newOp2(id2, "bar"))
@@ -43,10 +43,10 @@ func TestWriteReadMultipleAuthor(t *testing.T) {
entity.Append(newOp2(id1, "foobar"))
require.NoError(t, entity.CommitAsNeeded(repo))
- read, err := Read(def, repo, resolver, entity.Id())
+ read, err := Read(def, wrapper, repo, resolver, entity.Id())
require.NoError(t, err)
- assertEqualEntities(t, entity, read)
+ assertEqualEntities(t, entity.Entity, read.Entity)
}
func assertEqualEntities(t *testing.T, a, b *Entity) {
diff --git a/entity/dag/example_test.go b/entity/dag/example_test.go
index b1511dc6..a263eb2b 100644
--- a/entity/dag/example_test.go
+++ b/entity/dag/example_test.go
@@ -200,7 +200,11 @@ type ProjectConfig struct {
}
func NewProjectConfig() *ProjectConfig {
- return &ProjectConfig{Entity: dag.New(def)}
+ return wrapper(dag.New(def))
+}
+
+func wrapper(e *dag.Entity) *ProjectConfig {
+ return &ProjectConfig{Entity: e}
}
// a Definition describes a few properties of the Entity, a sort of configuration to manipulate the
@@ -282,11 +286,7 @@ func (pc ProjectConfig) Compile() *Snapshot {
// Read is a helper to load a ProjectConfig from a Repository
func Read(repo repository.ClockedRepo, id entity.Id) (*ProjectConfig, error) {
- e, err := dag.Read(def, repo, simpleResolvers(repo), id)
- if err != nil {
- return nil, err
- }
- return &ProjectConfig{Entity: e}, nil
+ return dag.Read(def, wrapper, repo, simpleResolvers(repo), id)
}
func simpleResolvers(repo repository.ClockedRepo) entity.Resolvers {
@@ -331,7 +331,7 @@ func Example_entity() {
_ = confRene.Commit(repoRene)
// Isaac pull and read the config
- _ = dag.Pull(def, repoIsaac, simpleResolvers(repoIsaac), "origin", isaac)
+ _ = dag.Pull(def, wrapper, repoIsaac, simpleResolvers(repoIsaac), "origin", isaac)
confIsaac, _ := Read(repoIsaac, confRene.Id())
// Compile gives the current state of the config
diff --git a/entity/dag/interface.go b/entity/dag/interface.go
index 613f60e6..80abaced 100644
--- a/entity/dag/interface.go
+++ b/entity/dag/interface.go
@@ -25,6 +25,10 @@ type Interface[SnapT Snapshot, OpT Operation] interface {
// Commit writes the staging area in Git and move the operations to the packs
Commit(repo repository.ClockedRepo) error
+ // CommitAsNeeded executes a Commit only if necessary. This function is useful to avoid getting an error if the Entity
+ // is already in sync with the repository.
+ CommitAsNeeded(repo repository.ClockedRepo) error
+
// FirstOp lookup for the very first operation of the Entity.
FirstOp() OpT
@@ -32,7 +36,7 @@ type Interface[SnapT Snapshot, OpT Operation] interface {
// For a valid Entity, should never be nil
LastOp() OpT
- // Compile a bug in an easily usable snapshot
+ // Compile an Entity into an easily usable snapshot
Compile() SnapT
// CreateLamportTime return the Lamport time of creation
diff --git a/entity/dag/op_set_metadata_test.go b/entity/dag/op_set_metadata_test.go
index f4f20e8e..07ece013 100644
--- a/entity/dag/op_set_metadata_test.go
+++ b/entity/dag/op_set_metadata_test.go
@@ -12,6 +12,8 @@ import (
"github.com/stretchr/testify/require"
)
+var _ Snapshot = &snapshotMock{}
+
type snapshotMock struct {
ops []Operation
}
@@ -20,6 +22,10 @@ func (s *snapshotMock) AllOperations() []Operation {
return s.ops
}
+func (s *snapshotMock) AppendOperation(op Operation) {
+ s.ops = append(s.ops, op)
+}
+
func TestSetMetadata(t *testing.T) {
snap := &snapshotMock{}
diff --git a/entity/dag/operation.go b/entity/dag/operation.go
index 1a778878..f50d91b6 100644
--- a/entity/dag/operation.go
+++ b/entity/dag/operation.go
@@ -63,6 +63,13 @@ type Operation interface {
setExtraMetadataImmutable(key string, value string)
}
+type OperationWithApply[SnapT Snapshot] interface {
+ Operation
+
+ // Apply the operation to a Snapshot to create the final state
+ Apply(snapshot SnapT)
+}
+
// OperationWithFiles is an optional extension for an Operation that has files dependency, stored in git.
type OperationWithFiles interface {
// GetFiles return the files needed by this operation
@@ -83,6 +90,8 @@ type OperationDoesntChangeSnapshot interface {
type Snapshot interface {
// AllOperations returns all the operations that have been applied to that snapshot, in order
AllOperations() []Operation
+ // AppendOperation adds an operation to the list
+ AppendOperation(op Operation)
}
// OpBase implement the common feature that every Operation should support.
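A minimal Snapshot implementation satisfying the extended interface, mirroring the snapshotMock used in the tests (the listSnapshot name is illustrative):

package example

import "github.com/MichaelMure/git-bug/entity/dag"

var _ dag.Snapshot = &listSnapshot{}

// listSnapshot keeps operations in a plain slice, which is all the Snapshot interface requires.
type listSnapshot struct {
	ops []dag.Operation
}

// AllOperations returns the operations applied so far, in order.
func (s *listSnapshot) AllOperations() []dag.Operation {
	return s.ops
}

// AppendOperation records one more operation, as required by the new interface method.
func (s *listSnapshot) AppendOperation(op dag.Operation) {
	s.ops = append(s.ops, op)
}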
diff --git a/entity/err.go b/entity/err.go
index 408e27b4..4453d36e 100644
--- a/entity/err.go
+++ b/entity/err.go
@@ -5,13 +5,34 @@ import (
"strings"
)
+// ErrNotFound is to be returned when an entity, item, or element is
+// not found.
+type ErrNotFound struct {
+ typename string
+}
+
+func NewErrNotFound(typename string) *ErrNotFound {
+ return &ErrNotFound{typename: typename}
+}
+
+func (e ErrNotFound) Error() string {
+ return fmt.Sprintf("%s doesn't exist", e.typename)
+}
+
+func IsErrNotFound(err error) bool {
+ _, ok := err.(*ErrNotFound)
+ return ok
+}
+
+// ErrMultipleMatch is to be returned when more than one entity, item, or element
+// is found where only one was expected.
type ErrMultipleMatch struct {
- entityType string
- Matching []Id
+ typename string
+ Matching []Id
}
-func NewErrMultipleMatch(entityType string, matching []Id) *ErrMultipleMatch {
- return &ErrMultipleMatch{entityType: entityType, Matching: matching}
+func NewErrMultipleMatch(typename string, matching []Id) *ErrMultipleMatch {
+ return &ErrMultipleMatch{typename: typename, Matching: matching}
}
func (e ErrMultipleMatch) Error() string {
@@ -22,7 +43,7 @@ func (e ErrMultipleMatch) Error() string {
}
return fmt.Sprintf("Multiple matching %s found:\n%s",
- e.entityType,
+ e.typename,
strings.Join(matching, "\n"))
}
@@ -31,6 +52,8 @@ func IsErrMultipleMatch(err error) bool {
return ok
}
+// ErrInvalidFormat is to be returned when reading on-disk data with an unexpected
+// format or version.
type ErrInvalidFormat struct {
version uint
expected uint
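A short sketch of how the new typed error is meant to be produced and detected (the "vehicle" typename and helpers are illustrative):

package example

import (
	"fmt"

	"github.com/MichaelMure/git-bug/entity"
)

// loadVehicle returns a typed not-found error instead of a package-specific sentinel.
func loadVehicle(id entity.Id) error {
	// lookup elided; on a miss, return the typed error
	return entity.NewErrNotFound("vehicle")
}

func report(id entity.Id) {
	if err := loadVehicle(id); entity.IsErrNotFound(err) {
		fmt.Printf("%s: %v\n", id, err) // prints "<id>: vehicle doesn't exist"
	}
}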
diff --git a/entity/interface.go b/entity/interface.go
index fb4735e4..3035ac88 100644
--- a/entity/interface.go
+++ b/entity/interface.go
@@ -9,4 +9,6 @@ type Interface interface {
// the root of the entity.
// It is acceptable to use such a hash and keep mutating that data as long as Id() is not called.
Id() Id
+ // Validate checks that the Entity data is valid
+ Validate() error
}
diff --git a/entity/resolver.go b/entity/resolver.go
index b2f831d7..bd16b901 100644
--- a/entity/resolver.go
+++ b/entity/resolver.go
@@ -5,16 +5,23 @@ import (
"sync"
)
+// Resolved is the minimal interface on which a Resolver operates.
+// Notably, this operates on Entity and Excerpt in the cache.
+type Resolved interface {
+ // Id returns the object identifier.
+ Id() Id
+}
+
// Resolver is an interface to find an Entity from its Id
type Resolver interface {
- Resolve(id Id) (Interface, error)
+ Resolve(id Id) (Resolved, error)
}
// Resolvers is a collection of Resolver, for different type of Entity
-type Resolvers map[Interface]Resolver
+type Resolvers map[Resolved]Resolver
// Resolve use the appropriate sub-resolver for the given type and find the Entity matching the Id.
-func Resolve[T Interface](rs Resolvers, id Id) (T, error) {
+func Resolve[T Resolved](rs Resolvers, id Id) (T, error) {
var zero T
for t, resolver := range rs {
switch t.(type) {
@@ -35,17 +42,17 @@ var _ Resolver = &CachedResolver{}
type CachedResolver struct {
resolver Resolver
mu sync.RWMutex
- entities map[Id]Interface
+ entities map[Id]Resolved
}
func NewCachedResolver(resolver Resolver) *CachedResolver {
return &CachedResolver{
resolver: resolver,
- entities: make(map[Id]Interface),
+ entities: make(map[Id]Resolved),
}
}
-func (c *CachedResolver) Resolve(id Id) (Interface, error) {
+func (c *CachedResolver) Resolve(id Id) (Resolved, error) {
c.mu.RLock()
if i, ok := c.entities[id]; ok {
c.mu.RUnlock()
@@ -64,18 +71,18 @@ func (c *CachedResolver) Resolve(id Id) (Interface, error) {
return i, nil
}
-var _ Resolver = ResolverFunc(nil)
+var _ Resolver = ResolverFunc[Resolved](nil)
// ResolverFunc is a helper to morph a function resolver into a Resolver
-type ResolverFunc func(id Id) (Interface, error)
+type ResolverFunc[EntityT Resolved] func(id Id) (EntityT, error)
-func (fn ResolverFunc) Resolve(id Id) (Interface, error) {
+func (fn ResolverFunc[EntityT]) Resolve(id Id) (Resolved, error) {
return fn(id)
}
// MakeResolver create a resolver able to return the given entities.
-func MakeResolver(entities ...Interface) Resolver {
- return ResolverFunc(func(id Id) (Interface, error) {
+func MakeResolver(entities ...Resolved) Resolver {
+ return ResolverFunc[Resolved](func(id Id) (Resolved, error) {
for _, entity := range entities {
if entity.Id() == id {
return entity, nil
diff --git a/entity/streamed.go b/entity/streamed.go
new file mode 100644
index 00000000..789224a3
--- /dev/null
+++ b/entity/streamed.go
@@ -0,0 +1,6 @@
+package entity
+
+type StreamedEntity[EntityT Interface] struct {
+ Entity EntityT
+ Err error
+}
diff --git a/go.mod b/go.mod
index 0016de2f..8fa3bbb2 100644
--- a/go.mod
+++ b/go.mod
@@ -4,7 +4,7 @@ go 1.18
require (
github.com/99designs/gqlgen v0.17.20
- github.com/99designs/keyring v1.2.1
+ github.com/99designs/keyring v1.2.2
github.com/MichaelMure/go-term-text v0.3.1
github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7
github.com/araddon/dateparse v0.0.0-20190622164848-0fb0a474d195
@@ -26,20 +26,18 @@ require (
github.com/spf13/cobra v1.6.1
github.com/stretchr/testify v1.8.1
github.com/vektah/gqlparser/v2 v2.5.1
- github.com/xanzy/go-gitlab v0.74.0
+ github.com/xanzy/go-gitlab v0.77.0
golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838
golang.org/x/oauth2 v0.0.0-20220722155238-128564f6959c
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4
- golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab
- golang.org/x/text v0.4.0
+ golang.org/x/sys v0.3.0
+ golang.org/x/text v0.5.0
)
require (
github.com/lithammer/dedent v1.1.0 // indirect
github.com/owenrumney/go-sarif v1.0.11 // indirect
github.com/segmentio/fasthash v1.0.3 // indirect
- github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 // indirect
- github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 // indirect
github.com/zclconf/go-cty v1.8.4 // indirect
golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e // indirect
)
@@ -107,7 +105,7 @@ require (
go.etcd.io/bbolt v1.3.5 // indirect
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
golang.org/x/net v0.0.0-20220805013720-a33c5aa5df48 // indirect
- golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect
+ golang.org/x/term v0.3.0 // indirect
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 // indirect
golang.org/x/tools v0.1.13-0.20220803210227-8b9a1fbdf5c3 // indirect
golang.org/x/vuln v0.0.0-20220908155419-5537ad2271a7
diff --git a/go.sum b/go.sum
index 683326c2..e0f01747 100644
--- a/go.sum
+++ b/go.sum
@@ -2,8 +2,8 @@ github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMb
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4=
github.com/99designs/gqlgen v0.17.20 h1:O7WzccIhKB1dm+7g6dhQcULINftfiLSBg2l/mwbpJMw=
github.com/99designs/gqlgen v0.17.20/go.mod h1:Mja2HI23kWT1VRH09hvWshFgOzKswpO20o4ScpJIES4=
-github.com/99designs/keyring v1.2.1 h1:tYLp1ULvO7i3fI5vE21ReQuj99QFSs7lGm0xWyJo87o=
-github.com/99designs/keyring v1.2.1/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA=
+github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0=
+github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/toml v1.1.0 h1:ksErzDEI1khOiGPgpwuI7x2ebx/uXQNw7xJpn9Eq1+I=
github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
@@ -253,10 +253,6 @@ github.com/shurcooL/githubv4 v0.0.0-20190601194912-068505affed7 h1:Vk3RiBQpF0Ja+
github.com/shurcooL/githubv4 v0.0.0-20190601194912-068505affed7/go.mod h1:hAF0iLZy4td2EX+/8Tw+4nodhlMrwN3HupfaXj3zkGo=
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk=
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg=
-github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 h1:bUGsEnyNbVPw06Bs80sCeARAlK8lhwqGyi6UT8ymuGk=
-github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
-github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 h1:pXY9qYc/MP5zdvqWEUH6SjNiu7VhSjuVFTFiTcphaLU=
-github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/skratchdot/open-golang v0.0.0-20190402232053-79abb63cd66e h1:VAzdS5Nw68fbf5RZ8RDVlUvPXNU6Z3jtPCK/qvm4FoQ=
github.com/skratchdot/open-golang v0.0.0-20190402232053-79abb63cd66e/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
@@ -301,8 +297,8 @@ github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+
github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
github.com/willf/bitset v1.1.10 h1:NotGKqX0KwQ72NUzqrjZq5ipPNDQex9lo3WpaS8L2sc=
github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
-github.com/xanzy/go-gitlab v0.74.0 h1:Ha1cokbjn0PXy6B19t3W324dwM4AOT52fuHr7nERPrc=
-github.com/xanzy/go-gitlab v0.74.0/go.mod h1:d/a0vswScO7Agg1CZNz15Ic6SSvBG9vfw8egL99t4kA=
+github.com/xanzy/go-gitlab v0.77.0 h1:UrbGlxkWVCbkpa6Fk6cM8ARh+rLACWemkJnsawT7t98=
+github.com/xanzy/go-gitlab v0.77.0/go.mod h1:d/a0vswScO7Agg1CZNz15Ic6SSvBG9vfw8egL99t4kA=
github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=
github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
@@ -367,20 +363,22 @@ golang.org/x/sys v0.0.0-20210819135213-f52c844e1c1c/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab h1:2QkjZIsXupsJbJIdSjjUOgWK3aEtzyuh2mPt3l/CkeU=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
+golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
-golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI=
+golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
-golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg=
-golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM=
+golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 h1:ftMN5LMiBFjbzleLqtoBZk7KdJwhuybIU+FckUHgoyQ=
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
diff --git a/repository/gogit.go b/repository/gogit.go
index 35934c91..01c47d41 100644
--- a/repository/gogit.go
+++ b/repository/gogit.go
@@ -13,7 +13,6 @@ import (
"time"
"github.com/ProtonMail/go-crypto/openpgp"
- "github.com/blevesearch/bleve"
"github.com/go-git/go-billy/v5"
"github.com/go-git/go-billy/v5/osfs"
gogit "github.com/go-git/go-git/v5"
@@ -46,7 +45,7 @@ type GoGitRepo struct {
clocks map[string]lamport.Clock
indexesMutex sync.Mutex
- indexes map[string]bleve.Index
+ indexes map[string]Index
keyring Keyring
localStorage billy.Filesystem
@@ -76,7 +75,7 @@ func OpenGoGitRepo(path, namespace string, clockLoaders []ClockLoader) (*GoGitRe
r: r,
path: path,
clocks: make(map[string]lamport.Clock),
- indexes: make(map[string]bleve.Index),
+ indexes: make(map[string]Index),
keyring: k,
localStorage: osfs.New(filepath.Join(path, namespace)),
}
@@ -130,7 +129,7 @@ func InitGoGitRepo(path, namespace string) (*GoGitRepo, error) {
r: r,
path: filepath.Join(path, ".git"),
clocks: make(map[string]lamport.Clock),
- indexes: make(map[string]bleve.Index),
+ indexes: make(map[string]Index),
keyring: k,
localStorage: osfs.New(filepath.Join(path, ".git", namespace)),
}, nil
@@ -155,7 +154,7 @@ func InitBareGoGitRepo(path, namespace string) (*GoGitRepo, error) {
r: r,
path: path,
clocks: make(map[string]lamport.Clock),
- indexes: make(map[string]bleve.Index),
+ indexes: make(map[string]Index),
keyring: k,
localStorage: osfs.New(filepath.Join(path, namespace)),
}, nil
@@ -219,11 +218,12 @@ func isGitDir(path string) (bool, error) {
func (repo *GoGitRepo) Close() error {
var firstErr error
- for _, index := range repo.indexes {
+ for name, index := range repo.indexes {
err := index.Close()
if err != nil && firstErr == nil {
firstErr = err
}
+ delete(repo.indexes, name)
}
return firstErr
}
@@ -324,8 +324,7 @@ func (repo *GoGitRepo) LocalStorage() billy.Filesystem {
return repo.localStorage
}
-// GetBleveIndex return a bleve.Index that can be used to index documents
-func (repo *GoGitRepo) GetBleveIndex(name string) (bleve.Index, error) {
+func (repo *GoGitRepo) GetIndex(name string) (Index, error) {
repo.indexesMutex.Lock()
defer repo.indexesMutex.Unlock()
@@ -335,63 +334,28 @@ func (repo *GoGitRepo) GetBleveIndex(name string) (bleve.Index, error) {
path := filepath.Join(repo.localStorage.Root(), indexPath, name)
- index, err := bleve.Open(path)
+ index, err := openBleveIndex(path)
if err == nil {
repo.indexes[name] = index
- return index, nil
- }
-
- err = os.MkdirAll(path, os.ModePerm)
- if err != nil {
- return nil, err
- }
-
- mapping := bleve.NewIndexMapping()
- mapping.DefaultAnalyzer = "en"
-
- index, err = bleve.New(path, mapping)
- if err != nil {
- return nil, err
}
-
- repo.indexes[name] = index
-
- return index, nil
-}
-
-// ClearBleveIndex will wipe the given index
-func (repo *GoGitRepo) ClearBleveIndex(name string) error {
- repo.indexesMutex.Lock()
- defer repo.indexesMutex.Unlock()
-
- if index, ok := repo.indexes[name]; ok {
- err := index.Close()
- if err != nil {
- return err
- }
- delete(repo.indexes, name)
- }
-
- path := filepath.Join(repo.localStorage.Root(), indexPath, name)
- err := os.RemoveAll(path)
- if err != nil {
- return err
- }
-
- return nil
+ return index, err
}
// FetchRefs fetch git refs matching a directory prefix to a remote
// Ex: prefix="foo" will fetch any remote refs matching "refs/foo/*" locally.
// The equivalent git refspec would be "refs/foo/*:refs/remotes/<remote>/foo/*"
-func (repo *GoGitRepo) FetchRefs(remote string, prefix string) (string, error) {
- refspec := fmt.Sprintf("refs/%s/*:refs/remotes/%s/%s/*", prefix, remote, prefix)
+func (repo *GoGitRepo) FetchRefs(remote string, prefixes ...string) (string, error) {
+ refSpecs := make([]config.RefSpec, len(prefixes))
+
+ for i, prefix := range prefixes {
+ refSpecs[i] = config.RefSpec(fmt.Sprintf("refs/%s/*:refs/remotes/%s/%s/*", prefix, remote, prefix))
+ }
buf := bytes.NewBuffer(nil)
err := repo.r.Fetch(&gogit.FetchOptions{
RemoteName: remote,
- RefSpecs: []config.RefSpec{config.RefSpec(refspec)},
+ RefSpecs: refSpecs,
Progress: buf,
})
if err == gogit.NoErrAlreadyUpToDate {
@@ -410,35 +374,41 @@ func (repo *GoGitRepo) FetchRefs(remote string, prefix string) (string, error) {
//
// Additionally, PushRefs will update the local references in refs/remotes/<remote>/foo to match
// the remote state.
-func (repo *GoGitRepo) PushRefs(remote string, prefix string) (string, error) {
- refspec := fmt.Sprintf("refs/%s/*:refs/%s/*", prefix, prefix)
-
+func (repo *GoGitRepo) PushRefs(remote string, prefixes ...string) (string, error) {
remo, err := repo.r.Remote(remote)
if err != nil {
return "", err
}
- // to make sure that the push also create the corresponding refs/remotes/<remote>/... references,
- // we need to have a default fetch refspec configured on the remote, to make our refs "track" the remote ones.
- // This does not change the config on disk, only on memory.
- hasCustomFetch := false
- fetchRefspec := fmt.Sprintf("refs/%s/*:refs/remotes/%s/%s/*", prefix, remote, prefix)
- for _, r := range remo.Config().Fetch {
- if string(r) == fetchRefspec {
- hasCustomFetch = true
- break
+ refSpecs := make([]config.RefSpec, len(prefixes))
+
+ for i, prefix := range prefixes {
+ refspec := fmt.Sprintf("refs/%s/*:refs/%s/*", prefix, prefix)
+
+ // to make sure that the push also creates the corresponding refs/remotes/<remote>/... references,
+ // we need to have a default fetch refspec configured on the remote, to make our refs "track" the remote ones.
+ // This does not change the config on disk, only in memory.
+ hasCustomFetch := false
+ fetchRefspec := fmt.Sprintf("refs/%s/*:refs/remotes/%s/%s/*", prefix, remote, prefix)
+ for _, r := range remo.Config().Fetch {
+ if string(r) == fetchRefspec {
+ hasCustomFetch = true
+ break
+ }
}
- }
- if !hasCustomFetch {
- remo.Config().Fetch = append(remo.Config().Fetch, config.RefSpec(fetchRefspec))
+ if !hasCustomFetch {
+ remo.Config().Fetch = append(remo.Config().Fetch, config.RefSpec(fetchRefspec))
+ }
+
+ refSpecs[i] = config.RefSpec(refspec)
}
buf := bytes.NewBuffer(nil)
err = remo.Push(&gogit.PushOptions{
RemoteName: remote,
- RefSpecs: []config.RefSpec{config.RefSpec(refspec)},
+ RefSpecs: refSpecs,
Progress: buf,
})
if err == gogit.NoErrAlreadyUpToDate {
@@ -480,6 +450,9 @@ func (repo *GoGitRepo) ReadData(hash Hash) ([]byte, error) {
defer repo.rMutex.Unlock()
obj, err := repo.r.BlobObject(plumbing.NewHash(hash.String()))
+ if err == plumbing.ErrObjectNotFound {
+ return nil, ErrNotFound
+ }
if err != nil {
return nil, err
}
@@ -549,6 +522,9 @@ func (repo *GoGitRepo) ReadTree(hash Hash) ([]TreeEntry, error) {
// the given hash could be a tree or a commit
obj, err := repo.r.Storer.EncodedObject(plumbing.AnyObject, h)
+ if err == plumbing.ErrObjectNotFound {
+ return nil, ErrNotFound
+ }
if err != nil {
return nil, err
}
@@ -655,43 +631,11 @@ func (repo *GoGitRepo) StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity,
return Hash(hash.String()), nil
}
-// GetTreeHash return the git tree hash referenced in a commit
-func (repo *GoGitRepo) GetTreeHash(commit Hash) (Hash, error) {
- repo.rMutex.Lock()
- defer repo.rMutex.Unlock()
-
- obj, err := repo.r.CommitObject(plumbing.NewHash(commit.String()))
- if err != nil {
- return "", err
- }
-
- return Hash(obj.TreeHash.String()), nil
-}
-
-// FindCommonAncestor will return the last common ancestor of two chain of commit
-func (repo *GoGitRepo) FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error) {
- repo.rMutex.Lock()
- defer repo.rMutex.Unlock()
-
- obj1, err := repo.r.CommitObject(plumbing.NewHash(commit1.String()))
- if err != nil {
- return "", err
- }
- obj2, err := repo.r.CommitObject(plumbing.NewHash(commit2.String()))
- if err != nil {
- return "", err
- }
-
- commits, err := obj1.MergeBase(obj2)
- if err != nil {
- return "", err
- }
-
- return Hash(commits[0].Hash.String()), nil
-}
-
func (repo *GoGitRepo) ResolveRef(ref string) (Hash, error) {
r, err := repo.r.Reference(plumbing.ReferenceName(ref), false)
+ if err == plumbing.ErrReferenceNotFound {
+ return "", ErrNotFound
+ }
if err != nil {
return "", err
}
@@ -744,6 +688,9 @@ func (repo *GoGitRepo) RefExist(ref string) (bool, error) {
// CopyRef will create a new reference with the same value as another one
func (repo *GoGitRepo) CopyRef(source string, dest string) error {
r, err := repo.r.Reference(plumbing.ReferenceName(source), false)
+ if err == plumbing.ErrReferenceNotFound {
+ return ErrNotFound
+ }
if err != nil {
return err
}
@@ -760,6 +707,9 @@ func (repo *GoGitRepo) ReadCommit(hash Hash) (Commit, error) {
defer repo.rMutex.Unlock()
commit, err := repo.r.CommitObject(plumbing.NewHash(hash.String()))
+ if err == plumbing.ErrObjectNotFound {
+ return Commit{}, ErrNotFound
+ }
if err != nil {
return Commit{}, err
}
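With the variadic FetchRefs and PushRefs, several ref namespaces can be synchronized in a single git round-trip. A hedged sketch (the remote and namespace names are examples):

package example

import "github.com/MichaelMure/git-bug/repository"

// syncAll fetches then pushes the bug and identity namespaces with one call each.
func syncAll(repo repository.Repo, remote string) error {
	if _, err := repo.FetchRefs(remote, "bugs", "identities"); err != nil {
		return err
	}
	_, err := repo.PushRefs(remote, "bugs", "identities")
	return err
}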
diff --git a/repository/gogit_test.go b/repository/gogit_test.go
index a3de0a03..02bd42fd 100644
--- a/repository/gogit_test.go
+++ b/repository/gogit_test.go
@@ -65,24 +65,19 @@ func TestGoGitRepo_Indexes(t *testing.T) {
plainRoot := goGitRepoDir(t, repo)
// Can create indices
- indexA, err := repo.GetBleveIndex("a")
+ indexA, err := repo.GetIndex("a")
require.NoError(t, err)
require.NotZero(t, indexA)
require.FileExists(t, filepath.Join(plainRoot, ".git", namespace, "indexes", "a", "index_meta.json"))
require.FileExists(t, filepath.Join(plainRoot, ".git", namespace, "indexes", "a", "store"))
- indexB, err := repo.GetBleveIndex("b")
+ indexB, err := repo.GetIndex("b")
require.NoError(t, err)
require.NotZero(t, indexB)
require.DirExists(t, filepath.Join(plainRoot, ".git", namespace, "indexes", "b"))
// Can get an existing index
- indexA, err = repo.GetBleveIndex("a")
+ indexA, err = repo.GetIndex("a")
require.NoError(t, err)
require.NotZero(t, indexA)
-
- // Can delete an index
- err = repo.ClearBleveIndex("a")
- require.NoError(t, err)
- require.NoDirExists(t, filepath.Join(plainRoot, ".git", namespace, "indexes", "a"))
}
diff --git a/repository/hash.go b/repository/hash.go
index 6a11558f..ad0206c3 100644
--- a/repository/hash.go
+++ b/repository/hash.go
@@ -43,7 +43,7 @@ func (h *Hash) IsValid() bool {
return false
}
for _, r := range *h {
- if (r < 'a' || r > 'z') && (r < '0' || r > '9') {
+ if (r < 'a' || r > 'f') && (r < '0' || r > '9') {
return false
}
}
diff --git a/repository/index_bleve.go b/repository/index_bleve.go
new file mode 100644
index 00000000..aae41d5f
--- /dev/null
+++ b/repository/index_bleve.go
@@ -0,0 +1,154 @@
+package repository
+
+import (
+ "fmt"
+ "os"
+ "strings"
+ "sync"
+ "unicode/utf8"
+
+ "github.com/blevesearch/bleve"
+)
+
+var _ Index = &bleveIndex{}
+
+type bleveIndex struct {
+ path string
+
+ mu sync.RWMutex
+ index bleve.Index
+}
+
+func openBleveIndex(path string) (*bleveIndex, error) {
+ index, err := bleve.Open(path)
+ if err == nil {
+ return &bleveIndex{path: path, index: index}, nil
+ }
+
+ b := &bleveIndex{path: path}
+ err = b.makeIndex()
+ if err != nil {
+ return nil, err
+ }
+
+ return b, nil
+}
+
+func (b *bleveIndex) makeIndex() error {
+ err := os.MkdirAll(b.path, os.ModePerm)
+ if err != nil {
+ return err
+ }
+
+ // TODO: follow https://github.com/blevesearch/bleve/issues/1576 recommendations
+
+ mapping := bleve.NewIndexMapping()
+ mapping.DefaultAnalyzer = "en"
+
+ index, err := bleve.New(b.path, mapping)
+ if err != nil {
+ return err
+ }
+ b.index = index
+ return nil
+}
+
+func (b *bleveIndex) IndexOne(id string, texts []string) error {
+ b.mu.Lock()
+ defer b.mu.Unlock()
+ return b._index(b.index.Index, id, texts)
+}
+
+func (b *bleveIndex) IndexBatch() (indexer func(id string, texts []string) error, closer func() error) {
+ b.mu.Lock()
+ defer b.mu.Unlock()
+
+ batch := b.index.NewBatch()
+
+ indexer = func(id string, texts []string) error {
+ return b._index(batch.Index, id, texts)
+ }
+
+ closer = func() error {
+ return b.index.Batch(batch)
+ }
+
+ return indexer, closer
+}
+
+func (b *bleveIndex) _index(indexer func(string, interface{}) error, id string, texts []string) error {
+ searchable := struct{ Text []string }{Text: texts}
+
+ // See https://github.com/blevesearch/bleve/issues/1576
+ var sb strings.Builder
+ normalize := func(text string) string {
+ sb.Reset()
+ for _, field := range strings.Fields(text) {
+ if utf8.RuneCountInString(field) < 100 {
+ sb.WriteString(field)
+ sb.WriteRune(' ')
+ }
+ }
+ return sb.String()
+ }
+
+ for i, s := range searchable.Text {
+ searchable.Text[i] = normalize(s)
+ }
+
+ return indexer(id, searchable)
+}
+
+func (b *bleveIndex) Search(terms []string) ([]string, error) {
+ b.mu.RLock()
+ defer b.mu.RUnlock()
+
+ for i, term := range terms {
+ if strings.Contains(term, " ") {
+ terms[i] = fmt.Sprintf("\"%s\"", term)
+ }
+ }
+
+ query := bleve.NewQueryStringQuery(strings.Join(terms, " "))
+ search := bleve.NewSearchRequest(query)
+
+ res, err := b.index.Search(search)
+ if err != nil {
+ return nil, err
+ }
+
+ ids := make([]string, len(res.Hits))
+ for i, hit := range res.Hits {
+ ids[i] = hit.ID
+ }
+
+ return ids, nil
+}
+
+func (b *bleveIndex) DocCount() (uint64, error) {
+ return b.index.DocCount()
+}
+
+func (b *bleveIndex) Clear() error {
+ b.mu.Lock()
+ defer b.mu.Unlock()
+
+ err := b.index.Close()
+ if err != nil {
+ return err
+ }
+
+ err = os.RemoveAll(b.path)
+ if err != nil {
+ return err
+ }
+
+ return b.makeIndex()
+}
+
+func (b *bleveIndex) Close() error {
+ b.mu.Lock()
+ defer b.mu.Unlock()
+
+ return b.index.Close()
+}
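A sketch of how the new Index abstraction is consumed, assuming GetIndex is exposed on the repository interface as the GoGitRepo and mock implementations suggest (the index name and documents are illustrative):

package example

import "github.com/MichaelMure/git-bug/repository"

// indexAndSearch stores one searchable document and queries it back.
func indexAndSearch(repo repository.ClockedRepo) ([]string, error) {
	index, err := repo.GetIndex("bugs")
	if err != nil {
		return nil, err
	}
	// Index a single document; IndexBatch would be the choice for bulk writes.
	err = index.IndexOne("1234abcd", []string{"crash on startup", "severity high"})
	if err != nil {
		return nil, err
	}
	return index.Search([]string{"crash"})
}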
diff --git a/repository/mock_repo.go b/repository/mock_repo.go
index 3d7f0e73..c2cef8ef 100644
--- a/repository/mock_repo.go
+++ b/repository/mock_repo.go
@@ -9,7 +9,6 @@ import (
"github.com/99designs/keyring"
"github.com/ProtonMail/go-crypto/openpgp"
- "github.com/blevesearch/bleve"
"github.com/go-git/go-billy/v5"
"github.com/go-git/go-billy/v5/memfs"
@@ -25,7 +24,7 @@ type mockRepo struct {
*mockRepoKeyring
*mockRepoCommon
*mockRepoStorage
- *mockRepoBleve
+ *mockRepoIndex
*mockRepoData
*mockRepoClock
*mockRepoTest
@@ -39,7 +38,7 @@ func NewMockRepo() *mockRepo {
mockRepoKeyring: NewMockRepoKeyring(),
mockRepoCommon: NewMockRepoCommon(),
mockRepoStorage: NewMockRepoStorage(),
- mockRepoBleve: newMockRepoBleve(),
+ mockRepoIndex: newMockRepoIndex(),
mockRepoData: NewMockRepoData(),
mockRepoClock: NewMockRepoClock(),
mockRepoTest: NewMockRepoTest(),
@@ -135,20 +134,20 @@ func (m *mockRepoStorage) LocalStorage() billy.Filesystem {
return m.localFs
}
-var _ RepoBleve = &mockRepoBleve{}
+var _ RepoIndex = &mockRepoIndex{}
-type mockRepoBleve struct {
+type mockRepoIndex struct {
indexesMutex sync.Mutex
- indexes map[string]bleve.Index
+ indexes map[string]Index
}
-func newMockRepoBleve() *mockRepoBleve {
- return &mockRepoBleve{
- indexes: make(map[string]bleve.Index),
+func newMockRepoIndex() *mockRepoIndex {
+ return &mockRepoIndex{
+ indexes: make(map[string]Index),
}
}
-func (m *mockRepoBleve) GetBleveIndex(name string) (bleve.Index, error) {
+func (m *mockRepoIndex) GetIndex(name string) (Index, error) {
m.indexesMutex.Lock()
defer m.indexesMutex.Unlock()
@@ -156,24 +155,63 @@ func (m *mockRepoBleve) GetBleveIndex(name string) (bleve.Index, error) {
return index, nil
}
- mapping := bleve.NewIndexMapping()
- mapping.DefaultAnalyzer = "en"
+ index := newIndex()
+ m.indexes[name] = index
+ return index, nil
+}
- index, err := bleve.NewMemOnly(mapping)
- if err != nil {
- return nil, err
- }
+var _ Index = &mockIndex{}
- m.indexes[name] = index
+type mockIndex map[string][]string
- return index, nil
+func newIndex() *mockIndex {
+ m := make(map[string][]string)
+ return (*mockIndex)(&m)
}
-func (m *mockRepoBleve) ClearBleveIndex(name string) error {
- m.indexesMutex.Lock()
- defer m.indexesMutex.Unlock()
+func (m *mockIndex) IndexOne(id string, texts []string) error {
+ (*m)[id] = texts
+ return nil
+}
+
+func (m *mockIndex) IndexBatch() (indexer func(id string, texts []string) error, closer func() error) {
+ indexer = func(id string, texts []string) error {
+ (*m)[id] = texts
+ return nil
+ }
+ closer = func() error { return nil }
+ return indexer, closer
+}
+
+func (m *mockIndex) Search(terms []string) (ids []string, err error) {
+loop:
+ for id, texts := range *m {
+ for _, text := range texts {
+ for _, s := range strings.Fields(text) {
+ for _, term := range terms {
+ if s == term {
+ ids = append(ids, id)
+ continue loop
+ }
+ }
+ }
+ }
+ }
+ return ids, nil
+}
+
+func (m *mockIndex) DocCount() (uint64, error) {
+ return uint64(len(*m)), nil
+}
+
+func (m *mockIndex) Clear() error {
+ for k := range *m {
+ delete(*m, k)
+ }
+ return nil
+}
- delete(m.indexes, name)
+func (m *mockIndex) Close() error {
return nil
}
@@ -201,12 +239,12 @@ func NewMockRepoData() *mockRepoData {
}
}
-func (r *mockRepoData) FetchRefs(remote string, prefix string) (string, error) {
+func (r *mockRepoData) FetchRefs(remote string, prefixes ...string) (string, error) {
panic("implement me")
}
// PushRefs push git refs to a remote
-func (r *mockRepoData) PushRefs(remote string, prefix string) (string, error) {
+func (r *mockRepoData) PushRefs(remote string, prefixes ...string) (string, error) {
panic("implement me")
}
@@ -220,7 +258,7 @@ func (r *mockRepoData) StoreData(data []byte) (Hash, error) {
func (r *mockRepoData) ReadData(hash Hash) ([]byte, error) {
data, ok := r.blobs[hash]
if !ok {
- return nil, fmt.Errorf("unknown hash")
+ return nil, ErrNotFound
}
return data, nil
@@ -245,13 +283,13 @@ func (r *mockRepoData) ReadTree(hash Hash) ([]TreeEntry, error) {
commit, ok := r.commits[hash]
if !ok {
- return nil, fmt.Errorf("unknown hash")
+ return nil, ErrNotFound
}
data, ok = r.trees[commit.treeHash]
if !ok {
- return nil, fmt.Errorf("unknown hash")
+ return nil, ErrNotFound
}
}
@@ -289,7 +327,7 @@ func (r *mockRepoData) StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity,
func (r *mockRepoData) ReadCommit(hash Hash) (Commit, error) {
c, ok := r.commits[hash]
if !ok {
- return Commit{}, fmt.Errorf("unknown commit")
+ return Commit{}, ErrNotFound
}
result := Commit{
@@ -308,19 +346,10 @@ func (r *mockRepoData) ReadCommit(hash Hash) (Commit, error) {
return result, nil
}
-func (r *mockRepoData) GetTreeHash(commit Hash) (Hash, error) {
- c, ok := r.commits[commit]
- if !ok {
- return "", fmt.Errorf("unknown commit")
- }
-
- return c.treeHash, nil
-}
-
func (r *mockRepoData) ResolveRef(ref string) (Hash, error) {
h, ok := r.refs[ref]
if !ok {
- return "", fmt.Errorf("unknown ref")
+ return "", ErrNotFound
}
return h, nil
}
@@ -356,48 +385,13 @@ func (r *mockRepoData) CopyRef(source string, dest string) error {
hash, exist := r.refs[source]
if !exist {
- return fmt.Errorf("Unknown ref")
+ return ErrNotFound
}
r.refs[dest] = hash
return nil
}
-func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) {
- ancestor1 := []Hash{hash1}
-
- for hash1 != "" {
- c, ok := r.commits[hash1]
- if !ok {
- return "", fmt.Errorf("unknown commit %v", hash1)
- }
- if len(c.parents) == 0 {
- break
- }
- ancestor1 = append(ancestor1, c.parents[0])
- hash1 = c.parents[0]
- }
-
- for {
- for _, ancestor := range ancestor1 {
- if ancestor == hash2 {
- return ancestor, nil
- }
- }
-
- c, ok := r.commits[hash2]
- if !ok {
- return "", fmt.Errorf("unknown commit %v", hash1)
- }
-
- if c.parents[0] == "" {
- return "", fmt.Errorf("no ancestor found")
- }
-
- hash2 = c.parents[0]
- }
-}
-
func (r *mockRepoData) ListCommits(ref string) ([]Hash, error) {
return nonNativeListCommits(r, ref)
}
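Note that the mock keeps the whole corpus in a plain map and its Search only matches exact whitespace-separated words (no analyzer, prefix or fuzzy matching like bleve), which is enough for RepoIndexTest further down. A quick sketch of that behavior, with made-up IDs and text:

func exampleMockIndex() []string {
	repo := NewMockRepo()
	idx, _ := repo.GetIndex("bugs")

	_ = idx.IndexOne("id1", []string{"foo bar", "baz"})

	hits, _ := idx.Search([]string{"ba"}) // nil: "ba" is not a whole word
	hits, _ = idx.Search([]string{"bar"}) // ["id1"]
	return hits
}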
diff --git a/repository/repo.go b/repository/repo.go
index 2f90b437..66baec65 100644
--- a/repository/repo.go
+++ b/repository/repo.go
@@ -6,7 +6,6 @@ import (
"io"
"github.com/ProtonMail/go-crypto/openpgp"
- "github.com/blevesearch/bleve"
"github.com/go-git/go-billy/v5"
"github.com/MichaelMure/git-bug/util/lamport"
@@ -17,6 +16,8 @@ var (
ErrNotARepo = errors.New("not a git repository")
// ErrClockNotExist is the error returned when a clock can't be found
ErrClockNotExist = errors.New("clock doesn't exist")
+ // ErrNotFound is the error returned when a git object can't be found
+ ErrNotFound = errors.New("ref not found")
)
// Repo represents a source code repository.
@@ -25,7 +26,7 @@ type Repo interface {
RepoKeyring
RepoCommon
RepoStorage
- RepoBleve
+ RepoIndex
RepoData
Close() error
@@ -81,13 +82,33 @@ type RepoStorage interface {
LocalStorage() billy.Filesystem
}
-// RepoBleve give access to Bleve to implement full-text search indexes.
-type RepoBleve interface {
- // GetBleveIndex return a bleve.Index that can be used to index documents
- GetBleveIndex(name string) (bleve.Index, error)
+// RepoIndex gives access to full-text search indexes
+type RepoIndex interface {
+ GetIndex(name string) (Index, error)
+}
+
+// Index is a full-text search index
+type Index interface {
+ // IndexOne indexes one document for the given ID. If the document already exists,
+ // it is replaced.
+ IndexOne(id string, texts []string) error
+
+ // IndexBatch starts a batch indexing. The returned indexer function is used the same
+ // way as IndexOne, and the closer function completes the batch insertion.
+ IndexBatch() (indexer func(id string, texts []string) error, closer func() error)
+
+ // Search returns the list of IDs matching the given terms.
+ Search(terms []string) (ids []string, err error)
+
+ // DocCount returns the number of documents in the index.
+ DocCount() (uint64, error)
- // ClearBleveIndex will wipe the given index
- ClearBleveIndex(name string) error
+ // Clear empties the index.
+ Clear() error
+
+ // Close closes the index and makes sure everything is safely written. After this call
+ // the index can't be used anymore.
+ Close() error
}
type Commit struct {
@@ -103,7 +124,7 @@ type RepoData interface {
// FetchRefs fetch git refs matching a directory prefix to a remote
// Ex: prefix="foo" will fetch any remote refs matching "refs/foo/*" locally.
// The equivalent git refspec would be "refs/foo/*:refs/remotes/<remote>/foo/*"
- FetchRefs(remote string, prefix string) (string, error)
+ FetchRefs(remote string, prefixes ...string) (string, error)
// PushRefs push git refs matching a directory prefix to a remote
// Ex: prefix="foo" will push any local refs matching "refs/foo/*" to the remote.
@@ -111,12 +132,13 @@ type RepoData interface {
//
// Additionally, PushRefs will update the local references in refs/remotes/<remote>/foo to match
// the remote state.
- PushRefs(remote string, prefix string) (string, error)
+ PushRefs(remote string, prefixes ...string) (string, error)
// StoreData will store arbitrary data and return the corresponding hash
StoreData(data []byte) (Hash, error)
// ReadData will attempt to read arbitrary data from the given hash
+ // Returns ErrNotFound if not found.
ReadData(hash Hash) ([]byte, error)
// StoreTree will store a mapping key-->Hash as a Git tree
@@ -124,6 +146,7 @@ type RepoData interface {
// ReadTree will return the list of entries in a Git tree
// The given hash could be from either a commit or a tree
+ // Returns ErrNotFound if not found.
ReadTree(hash Hash) ([]TreeEntry, error)
// StoreCommit will store a Git commit with the given Git tree
@@ -134,13 +157,11 @@ type RepoData interface {
StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity, parents ...Hash) (Hash, error)
// ReadCommit read a Git commit and returns some of its characteristic
+ // Returns ErrNotFound if not found.
ReadCommit(hash Hash) (Commit, error)
- // GetTreeHash return the git tree hash referenced in a commit
- // Deprecated
- GetTreeHash(commit Hash) (Hash, error)
-
// ResolveRef returns the hash of the target commit of the given ref
+ // Returns ErrNotFound if not found.
ResolveRef(ref string) (Hash, error)
// UpdateRef will create or update a Git reference
@@ -157,12 +178,9 @@ type RepoData interface {
RefExist(ref string) (bool, error)
// CopyRef will create a new reference with the same value as another one
+ // Returns ErrNotFound if not found.
CopyRef(source string, dest string) error
- // FindCommonAncestor will return the last common ancestor of two chain of commit
- // Deprecated
- FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error)
-
// ListCommits will return the list of tree hashes of a ref, in chronological order
ListCommits(ref string) ([]Hash, error)
}
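Since FetchRefs and PushRefs now take variadic prefixes and the lookup methods promise ErrNotFound, a hypothetical caller outside the package would look roughly like this (the remote and ref names are invented):

func syncAndResolve(repo repository.Repo) error {
	// fetch several ref namespaces in a single call
	if _, err := repo.FetchRefs("origin", "bugs", "identities"); err != nil {
		return err
	}

	_, err := repo.ResolveRef("refs/bugs/some-bug")
	if errors.Is(err, repository.ErrNotFound) {
		// the ref doesn't exist locally yet; not a hard failure
		return nil
	}
	return err
}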
diff --git a/repository/repo_testing.go b/repository/repo_testing.go
index 5d51d23f..821eb762 100644
--- a/repository/repo_testing.go
+++ b/repository/repo_testing.go
@@ -10,7 +10,6 @@ import (
"github.com/MichaelMure/git-bug/util/lamport"
)
-// TODO: add tests for RepoBleve
// TODO: add tests for RepoStorage
type RepoCreator func(t testing.TB, bare bool) TestedRepo
@@ -33,6 +32,10 @@ func RepoTest(t *testing.T, creator RepoCreator) {
RepoConfigTest(t, repo)
})
+ t.Run("Index", func(t *testing.T) {
+ RepoIndexTest(t, repo)
+ })
+
t.Run("Clocks", func(t *testing.T) {
RepoClockTest(t, repo)
})
@@ -45,6 +48,15 @@ func RepoConfigTest(t *testing.T, repo RepoConfig) {
testConfig(t, repo.LocalConfig())
}
+func randomHash() Hash {
+ var letterRunes = "abcdef0123456789"
+ b := make([]byte, idLengthSHA256)
+ for i := range b {
+ b[i] = letterRunes[rand.Intn(len(letterRunes))]
+ }
+ return Hash(b)
+}
+
// helper to test a RepoData
func RepoDataTest(t *testing.T, repo RepoData) {
// Blob
@@ -59,6 +71,9 @@ func RepoDataTest(t *testing.T, repo RepoData) {
require.NoError(t, err)
require.Equal(t, data, blob1Read)
+ _, err = repo.ReadData(randomHash())
+ require.ErrorIs(t, err, ErrNotFound)
+
// Tree
blobHash2, err := repo.StoreData(randomData())
@@ -108,25 +123,20 @@ func RepoDataTest(t *testing.T, repo RepoData) {
require.NoError(t, err)
require.ElementsMatch(t, tree2, tree2Read)
+ _, err = repo.ReadTree(randomHash())
+ require.ErrorIs(t, err, ErrNotFound)
+
// Commit
commit1, err := repo.StoreCommit(treeHash1)
require.NoError(t, err)
require.True(t, commit1.IsValid())
- treeHash1Read, err := repo.GetTreeHash(commit1)
- require.NoError(t, err)
- require.Equal(t, treeHash1, treeHash1Read)
-
// commit with a parent
commit2, err := repo.StoreCommit(treeHash2, commit1)
require.NoError(t, err)
require.True(t, commit2.IsValid())
- treeHash2Read, err := repo.GetTreeHash(commit2)
- require.NoError(t, err)
- require.Equal(t, treeHash2, treeHash2Read)
-
// ReadTree should accept tree and commit hashes
tree1read, err := repo.ReadTree(commit1)
require.NoError(t, err)
@@ -137,6 +147,9 @@ func RepoDataTest(t *testing.T, repo RepoData) {
c2expected := Commit{Hash: commit2, Parents: []Hash{commit1}, TreeHash: treeHash2}
require.Equal(t, c2expected, c2)
+ _, err = repo.ReadCommit(randomHash())
+ require.ErrorIs(t, err, ErrNotFound)
+
// Ref
exist1, err := repo.RefExist("refs/bugs/ref1")
@@ -169,14 +182,13 @@ func RepoDataTest(t *testing.T, repo RepoData) {
require.NoError(t, err)
require.Equal(t, []Hash{commit1, commit2}, commits)
- // Graph
+ _, err = repo.ResolveRef("/refs/bugs/refnotexist")
+ require.ErrorIs(t, err, ErrNotFound)
- commit3, err := repo.StoreCommit(treeHash1, commit1)
- require.NoError(t, err)
+ err = repo.CopyRef("/refs/bugs/refnotexist", "refs/foo")
+ require.ErrorIs(t, err, ErrNotFound)
- ancestorHash, err := repo.FindCommonAncestor(commit2, commit3)
- require.NoError(t, err)
- require.Equal(t, commit1, ancestorHash)
+ // Cleanup
err = repo.RemoveRef("refs/bugs/ref1")
require.NoError(t, err)
@@ -234,6 +246,48 @@ func RepoDataSignatureTest(t *testing.T, repo RepoData) {
require.Error(t, err)
}
+func RepoIndexTest(t *testing.T, repo RepoIndex) {
+ idx, err := repo.GetIndex("a")
+ require.NoError(t, err)
+
+ // simple indexing
+ err = idx.IndexOne("id1", []string{"foo", "bar", "foobar barfoo"})
+ require.NoError(t, err)
+
+ // batched indexing
+ indexer, closer := idx.IndexBatch()
+ err = indexer("id2", []string{"hello", "foo bar"})
+ require.NoError(t, err)
+ err = indexer("id3", []string{"Hola", "Esta bien"})
+ require.NoError(t, err)
+ err = closer()
+ require.NoError(t, err)
+
+ // search
+ res, err := idx.Search([]string{"foobar"})
+ require.NoError(t, err)
+ require.ElementsMatch(t, []string{"id1"}, res)
+
+ res, err = idx.Search([]string{"foo"})
+ require.NoError(t, err)
+ require.ElementsMatch(t, []string{"id1", "id2"}, res)
+
+ // re-indexing an item replaces previous versions
+ err = idx.IndexOne("id2", []string{"hello"})
+ require.NoError(t, err)
+
+ res, err = idx.Search([]string{"foo"})
+ require.NoError(t, err)
+ require.ElementsMatch(t, []string{"id1"}, res)
+
+ err = idx.Clear()
+ require.NoError(t, err)
+
+ res, err = idx.Search([]string{"foo"})
+ require.NoError(t, err)
+ require.Empty(t, res)
+}
+
// helper to test a RepoClock
func RepoClockTest(t *testing.T, repo RepoClock) {
allClocks, err := repo.AllClocks()
diff --git a/termui/bug_table.go b/termui/bug_table.go
index f3f8b2f3..9db13ada 100644
--- a/termui/bug_table.go
+++ b/termui/bug_table.go
@@ -239,7 +239,7 @@ func (bt *bugTable) disable(g *gocui.Gui) error {
func (bt *bugTable) paginate(max int) error {
var err error
- bt.allIds, err = bt.repo.QueryBugs(bt.query)
+ bt.allIds, err = bt.repo.Bugs().Query(bt.query)
if err != nil {
return err
}
@@ -265,7 +265,7 @@ func (bt *bugTable) doPaginate(max int) error {
bt.excerpts = make([]*cache.BugExcerpt, len(ids))
for i, id := range ids {
- excerpt, err := bt.repo.ResolveBugExcerpt(id)
+ excerpt, err := bt.repo.Bugs().ResolveExcerpt(id)
if err != nil {
return err
}
@@ -319,12 +319,12 @@ func (bt *bugTable) render(v *gocui.View, maxX int) {
labelsTxt.WriteString(lc256.Unescape())
}
- author, err := bt.repo.ResolveIdentityExcerpt(excerpt.AuthorId)
+ author, err := bt.repo.Identities().ResolveExcerpt(excerpt.AuthorId)
if err != nil {
panic(err)
}
- id := text.LeftPadMaxLine(excerpt.Id.Human(), columnWidths["id"], 0)
+ id := text.LeftPadMaxLine(excerpt.Id().Human(), columnWidths["id"], 0)
status := text.LeftPadMaxLine(excerpt.Status.String(), columnWidths["status"], 0)
labels := text.TruncateMax(labelsTxt.String(), minInt(columnWidths["title"]-2, 10))
title := text.LeftPadMaxLine(strings.TrimSpace(excerpt.Title), columnWidths["title"]-text.Len(labels), 0)
@@ -451,8 +451,8 @@ func (bt *bugTable) openBug(g *gocui.Gui, v *gocui.View) error {
// There are no open bugs, just do nothing
return nil
}
- id := bt.excerpts[bt.selectCursor].Id
- b, err := bt.repo.ResolveBug(id)
+ id := bt.excerpts[bt.selectCursor].Id()
+ b, err := bt.repo.Bugs().Resolve(id)
if err != nil {
return err
}
diff --git a/termui/label_select.go b/termui/label_select.go
index 2282583d..6721165e 100644
--- a/termui/label_select.go
+++ b/termui/label_select.go
@@ -37,7 +37,7 @@ func newLabelSelect() *labelSelect {
func (ls *labelSelect) SetBug(cache *cache.RepoCache, bug *cache.BugCache) {
ls.cache = cache
ls.bug = bug
- ls.labels = cache.ValidLabels()
+ ls.labels = cache.Bugs().ValidLabels()
// Find which labels are currently applied to the bug
bugLabels := bug.Snapshot().Labels
diff --git a/termui/termui.go b/termui/termui.go
index 4dd6e27d..79577ba9 100644
--- a/termui/termui.go
+++ b/termui/termui.go
@@ -200,7 +200,7 @@ func newBugWithEditor(repo *cache.RepoCache) error {
return errTerminateMainloop
} else {
- b, _, err = repo.NewBug(
+ b, _, err = repo.Bugs().New(
text.CleanupOneLine(title),
text.Cleanup(message),
)
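The termui changes above are mechanical: flat RepoCache methods (QueryBugs, ResolveBug, ResolveBugExcerpt, ResolveIdentityExcerpt, ValidLabels, NewBug, …) move behind typed subcaches reached via Bugs() and Identities(). A sketch of the new shape, with a made-up helper name:

func openBugTitles(repo *cache.RepoCache, q *query.Query) ([]string, error) {
	ids, err := repo.Bugs().Query(q)
	if err != nil {
		return nil, err
	}
	titles := make([]string, 0, len(ids))
	for _, id := range ids {
		excerpt, err := repo.Bugs().ResolveExcerpt(id)
		if err != nil {
			return nil, err
		}
		titles = append(titles, excerpt.Title)
	}
	return titles, nil
}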
diff --git a/util/multierr/errwaitgroup.go b/util/multierr/errwaitgroup.go
new file mode 100644
index 00000000..7279ed1f
--- /dev/null
+++ b/util/multierr/errwaitgroup.go
@@ -0,0 +1,115 @@
+package multierr
+
+import (
+ "context"
+ "fmt"
+ "sync"
+)
+
+type token struct{}
+
+// An ErrWaitGroup is a collection of goroutines working on subtasks that are part of
+// the same overall task.
+//
+// A zero ErrWaitGroup is valid, has no limit on the number of active goroutines,
+// and does not cancel on error.
+type ErrWaitGroup struct {
+ cancel func()
+
+ wg sync.WaitGroup
+
+ sem chan token
+
+ mu sync.Mutex
+ err error
+}
+
+func (g *ErrWaitGroup) done() {
+ if g.sem != nil {
+ <-g.sem
+ }
+ g.wg.Done()
+}
+
+// WithContext returns a new ErrWaitGroup and an associated Context derived from ctx.
+//
+// The derived Context is canceled the first time Wait returns.
+func WithContext(ctx context.Context) (*ErrWaitGroup, context.Context) {
+ ctx, cancel := context.WithCancel(ctx)
+ return &ErrWaitGroup{cancel: cancel}, ctx
+}
+
+// Wait blocks until all function calls from the Go method have returned, then
+// returns the combined non-nil errors (if any) from them.
+func (g *ErrWaitGroup) Wait() error {
+ g.wg.Wait()
+ if g.cancel != nil {
+ g.cancel()
+ }
+ return g.err
+}
+
+// Go calls the given function in a new goroutine.
+// It blocks until the new goroutine can be added without the number of
+// active goroutines in the group exceeding the configured limit.
+func (g *ErrWaitGroup) Go(f func() error) {
+ if g.sem != nil {
+ g.sem <- token{}
+ }
+
+ g.wg.Add(1)
+ go func() {
+ defer g.done()
+
+ if err := f(); err != nil {
+ g.mu.Lock()
+ g.err = Join(g.err, err)
+ g.mu.Unlock()
+ }
+ }()
+}
+
+// TryGo calls the given function in a new goroutine only if the number of
+// active goroutines in the group is currently below the configured limit.
+//
+// The return value reports whether the goroutine was started.
+func (g *ErrWaitGroup) TryGo(f func() error) bool {
+ if g.sem != nil {
+ select {
+ case g.sem <- token{}:
+ // Note: this allows barging iff channels in general allow barging.
+ default:
+ return false
+ }
+ }
+
+ g.wg.Add(1)
+ go func() {
+ defer g.done()
+
+ if err := f(); err != nil {
+ g.mu.Lock()
+ g.err = Join(g.err, err)
+ g.mu.Unlock()
+ }
+ }()
+ return true
+}
+
+// SetLimit limits the number of active goroutines in this group to at most n.
+// A negative value indicates no limit.
+//
+// Any subsequent call to the Go method will block until it can add an active
+// goroutine without exceeding the configured limit.
+//
+// The limit must not be modified while any goroutines in the group are active.
+func (g *ErrWaitGroup) SetLimit(n int) {
+ if n < 0 {
+ g.sem = nil
+ return
+ }
+ if len(g.sem) != 0 {
+ panic(fmt.Errorf("errwaitgroup: modify limit while %v goroutines in the group are still active", len(g.sem)))
+ }
+ g.sem = make(chan token, n)
+}
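ErrWaitGroup mirrors golang.org/x/sync/errgroup, but instead of keeping only the first error it joins every error returned by the goroutines. A small self-contained usage sketch (the worker logic and the limit are invented):

package main

import (
	"fmt"

	"github.com/MichaelMure/git-bug/util/multierr"
)

func main() {
	// a zero ErrWaitGroup is valid; SetLimit caps the concurrency
	var wg multierr.ErrWaitGroup
	wg.SetLimit(4)

	for i := 0; i < 10; i++ {
		i := i
		wg.Go(func() error {
			if i%3 == 0 {
				return fmt.Errorf("worker %d failed", i)
			}
			return nil
		})
	}

	// Wait returns all collected errors joined together, or nil
	if err := wg.Wait(); err != nil {
		fmt.Println(err) // one failure message per line
	}
}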
diff --git a/util/multierr/join.go b/util/multierr/join.go
new file mode 100644
index 00000000..880ba095
--- /dev/null
+++ b/util/multierr/join.go
@@ -0,0 +1,51 @@
+package multierr
+
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Join returns an error that wraps the given errors.
+// Any nil error values are discarded.
+// Join returns nil if errs contains no non-nil values.
+// The error formats as the concatenation of the strings obtained
+// by calling the Error method of each element of errs, with a newline
+// between each string.
+func Join(errs ...error) error {
+ n := 0
+ for _, err := range errs {
+ if err != nil {
+ n++
+ }
+ }
+ if n == 0 {
+ return nil
+ }
+ e := &joinError{
+ errs: make([]error, 0, n),
+ }
+ for _, err := range errs {
+ if err != nil {
+ e.errs = append(e.errs, err)
+ }
+ }
+ return e
+}
+
+type joinError struct {
+ errs []error
+}
+
+func (e *joinError) Error() string {
+ var b []byte
+ for i, err := range e.errs {
+ if i > 0 {
+ b = append(b, '\n')
+ }
+ b = append(b, err.Error()...)
+ }
+ return string(b)
+}
+
+func (e *joinError) Unwrap() []error {
+ return e.errs
+}
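Join follows the semantics of errors.Join from Go 1.20: nil values are dropped, the messages are concatenated with newlines, and Unwrap() []error keeps errors.Is and errors.As working, assuming a Go 1.20+ toolchain. A short sketch with invented errors:

package main

import (
	"errors"
	"fmt"

	"github.com/MichaelMure/git-bug/util/multierr"
)

var errPush = errors.New("push failed")

func main() {
	err := multierr.Join(nil, errPush, errors.New("fetch failed"))
	fmt.Println(err)                            // "push failed" and "fetch failed", one per line
	fmt.Println(errors.Is(err, errPush))        // true, thanks to Unwrap() []error
	fmt.Println(multierr.Join(nil, nil) == nil) // true: only nil errors means no error
}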