Diffstat (limited to 'bug')
-rw-r--r--  bug/bug.go                      703
-rw-r--r--  bug/bug_actions.go              116
-rw-r--r--  bug/bug_actions_test.go         390
-rw-r--r--  bug/bug_test.go                 183
-rw-r--r--  bug/clocks.go                    40
-rw-r--r--  bug/err.go                       17
-rw-r--r--  bug/identity.go                  27
-rw-r--r--  bug/interface.go                  8
-rw-r--r--  bug/op_add_comment.go            24
-rw-r--r--  bug/op_add_comment_test.go       10
-rw-r--r--  bug/op_create.go                 59
-rw-r--r--  bug/op_create_test.go            38
-rw-r--r--  bug/op_edit_comment.go           17
-rw-r--r--  bug/op_edit_comment_test.go      71
-rw-r--r--  bug/op_label_change.go           17
-rw-r--r--  bug/op_label_change_test.go      18
-rw-r--r--  bug/op_noop.go                   13
-rw-r--r--  bug/op_noop_test.go              10
-rw-r--r--  bug/op_set_metadata.go           26
-rw-r--r--  bug/op_set_metadata_test.go      57
-rw-r--r--  bug/op_set_status.go             17
-rw-r--r--  bug/op_set_status_test.go        18
-rw-r--r--  bug/op_set_title.go              29
-rw-r--r--  bug/op_set_title_test.go         18
-rw-r--r--  bug/operation.go                200
-rw-r--r--  bug/operation_iterator.go        72
-rw-r--r--  bug/operation_iterator_test.go   78
-rw-r--r--  bug/operation_pack.go           188
-rw-r--r--  bug/operation_pack_test.go       79
-rw-r--r--  bug/operation_test.go            38
-rw-r--r--  bug/snapshot.go                   5
-rw-r--r--  bug/sorting.go                    8
-rw-r--r--  bug/with_snapshot.go              8
33 files changed, 503 insertions, 2099 deletions
diff --git a/bug/bug.go b/bug/bug.go
index f6c35a2d..9d19a42c 100644
--- a/bug/bug.go
+++ b/bug/bug.go
@@ -2,277 +2,62 @@
package bug
import (
- "encoding/json"
"fmt"
- "strings"
-
- "github.com/pkg/errors"
"github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/entity/dag"
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
- "github.com/MichaelMure/git-bug/util/lamport"
)
-const bugsRefPattern = "refs/bugs/"
-const bugsRemoteRefPattern = "refs/remotes/%s/bugs/"
-
-const opsEntryName = "ops"
-const rootEntryName = "root"
-const mediaEntryName = "media"
-
-const createClockEntryPrefix = "create-clock-"
-const createClockEntryPattern = "create-clock-%d"
-const editClockEntryPrefix = "edit-clock-"
-const editClockEntryPattern = "edit-clock-%d"
-
-const creationClockName = "bug-create"
-const editClockName = "bug-edit"
-
-var ErrBugNotExist = errors.New("bug doesn't exist")
+var _ Interface = &Bug{}
+var _ entity.Interface = &Bug{}
-func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch {
- return entity.NewErrMultipleMatch("bug", matching)
-}
+// 1: original format
+// 2: no more legacy identities
+// 3: Ids are generated from the create operation serialized data instead of from the first git commit
+// 4: with DAG entity framework
+const formatVersion = 4
-func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch {
- return entity.NewErrMultipleMatch("operation", matching)
+var def = dag.Definition{
+ Typename: "bug",
+ Namespace: "bugs",
+ OperationUnmarshaler: operationUnmarshaller,
+ FormatVersion: formatVersion,
}
-var _ Interface = &Bug{}
-var _ entity.Interface = &Bug{}
+var ClockLoader = dag.ClockLoader(def)
// Bug holds the data of a bug thread, organized in a way close to
// how it will be persisted inside Git. This is the data structure
// used to merge two different versions of the same Bug.
type Bug struct {
-
- // A Lamport clock is a logical clock that allow to order event
- // inside a distributed system.
- // It must be the first field in this struct due to https://github.com/golang/go/issues/599
- createTime lamport.Time
- editTime lamport.Time
-
- // Id used as unique identifier
- id entity.Id
-
- lastCommit repository.Hash
- rootPack repository.Hash
-
- // all the committed operations
- packs []OperationPack
-
- // a temporary pack of operations used for convenience to pile up new operations
- // before a commit
- staging OperationPack
+ *dag.Entity
}
// NewBug create a new Bug
func NewBug() *Bug {
- // No id yet
- // No logical clock yet
- return &Bug{}
-}
-
-// ReadLocal will read a local bug from its hash
-func ReadLocal(repo repository.ClockedRepo, id entity.Id) (*Bug, error) {
- ref := bugsRefPattern + id.String()
- return read(repo, identity.NewSimpleResolver(repo), ref)
-}
-
-// ReadLocalWithResolver will read a local bug from its hash
-func ReadLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) {
- ref := bugsRefPattern + id.String()
- return read(repo, identityResolver, ref)
-}
-
-// ReadRemote will read a remote bug from its hash
-func ReadRemote(repo repository.ClockedRepo, remote string, id entity.Id) (*Bug, error) {
- ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String()
- return read(repo, identity.NewSimpleResolver(repo), ref)
-}
-
-// ReadRemoteWithResolver will read a remote bug from its hash
-func ReadRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string, id entity.Id) (*Bug, error) {
- ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String()
- return read(repo, identityResolver, ref)
-}
-
-// read will read and parse a Bug from git
-func read(repo repository.ClockedRepo, identityResolver identity.Resolver, ref string) (*Bug, error) {
- refSplit := strings.Split(ref, "/")
- id := entity.Id(refSplit[len(refSplit)-1])
-
- if err := id.Validate(); err != nil {
- return nil, errors.Wrap(err, "invalid ref ")
- }
-
- hashes, err := repo.ListCommits(ref)
-
- // TODO: this is not perfect, it might be a command invoke error
- if err != nil {
- return nil, ErrBugNotExist
- }
-
- bug := Bug{
- id: id,
- editTime: 0,
- }
-
- // Load each OperationPack
- for _, hash := range hashes {
- entries, err := repo.ReadTree(hash)
- if err != nil {
- return nil, errors.Wrap(err, "can't list git tree entries")
- }
-
- bug.lastCommit = hash
-
- var opsEntry repository.TreeEntry
- opsFound := false
- var rootEntry repository.TreeEntry
- rootFound := false
- var createTime uint64
- var editTime uint64
-
- for _, entry := range entries {
- if entry.Name == opsEntryName {
- opsEntry = entry
- opsFound = true
- continue
- }
- if entry.Name == rootEntryName {
- rootEntry = entry
- rootFound = true
- }
- if strings.HasPrefix(entry.Name, createClockEntryPrefix) {
- n, err := fmt.Sscanf(entry.Name, createClockEntryPattern, &createTime)
- if err != nil {
- return nil, errors.Wrap(err, "can't read create lamport time")
- }
- if n != 1 {
- return nil, fmt.Errorf("could not parse create time lamport value")
- }
- }
- if strings.HasPrefix(entry.Name, editClockEntryPrefix) {
- n, err := fmt.Sscanf(entry.Name, editClockEntryPattern, &editTime)
- if err != nil {
- return nil, errors.Wrap(err, "can't read edit lamport time")
- }
- if n != 1 {
- return nil, fmt.Errorf("could not parse edit time lamport value")
- }
- }
- }
-
- if !opsFound {
- return nil, errors.New("invalid tree, missing the ops entry")
- }
- if !rootFound {
- return nil, errors.New("invalid tree, missing the root entry")
- }
-
- if bug.rootPack == "" {
- bug.rootPack = rootEntry.Hash
- bug.createTime = lamport.Time(createTime)
- }
-
- // Due to rebase, edit Lamport time are not necessarily ordered
- if editTime > uint64(bug.editTime) {
- bug.editTime = lamport.Time(editTime)
- }
-
- // Update the clocks
- createClock, err := repo.GetOrCreateClock(creationClockName)
- if err != nil {
- return nil, err
- }
- if err := createClock.Witness(bug.createTime); err != nil {
- return nil, errors.Wrap(err, "failed to update create lamport clock")
- }
- editClock, err := repo.GetOrCreateClock(editClockName)
- if err != nil {
- return nil, err
- }
- if err := editClock.Witness(bug.editTime); err != nil {
- return nil, errors.Wrap(err, "failed to update edit lamport clock")
- }
-
- data, err := repo.ReadData(opsEntry.Hash)
- if err != nil {
- return nil, errors.Wrap(err, "failed to read git blob data")
- }
-
- opp := &OperationPack{}
- err = json.Unmarshal(data, &opp)
-
- if err != nil {
- return nil, errors.Wrap(err, "failed to decode OperationPack json")
- }
-
- // tag the pack with the commit hash
- opp.commitHash = hash
-
- bug.packs = append(bug.packs, *opp)
+ return &Bug{
+ Entity: dag.New(def),
}
+}
- // Make sure that the identities are properly loaded
- err = bug.EnsureIdentities(identityResolver)
+// Read will read a bug from a repository
+func Read(repo repository.ClockedRepo, id entity.Id) (*Bug, error) {
+ e, err := dag.Read(def, repo, identity.NewSimpleResolver(repo), id)
if err != nil {
return nil, err
}
-
- return &bug, nil
+ return &Bug{Entity: e}, nil
}
-// RemoveBug will remove a local bug from its entity.Id
-func RemoveBug(repo repository.ClockedRepo, id entity.Id) error {
- var fullMatches []string
-
- refs, err := repo.ListRefs(bugsRefPattern + id.String())
- if err != nil {
- return err
- }
- if len(refs) > 1 {
- return NewErrMultipleMatchBug(entity.RefsToIds(refs))
- }
- if len(refs) == 1 {
- // we have the bug locally
- fullMatches = append(fullMatches, refs[0])
- }
-
- remotes, err := repo.GetRemotes()
+// ReadWithResolver will read a bug from its Id, with a custom identity.Resolver
+func ReadWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) {
+ e, err := dag.Read(def, repo, identityResolver, id)
if err != nil {
- return err
- }
-
- for remote := range remotes {
- remotePrefix := fmt.Sprintf(bugsRemoteRefPattern+id.String(), remote)
- remoteRefs, err := repo.ListRefs(remotePrefix)
- if err != nil {
- return err
- }
- if len(remoteRefs) > 1 {
- return NewErrMultipleMatchBug(entity.RefsToIds(refs))
- }
- if len(remoteRefs) == 1 {
- // found the bug in a remote
- fullMatches = append(fullMatches, remoteRefs[0])
- }
- }
-
- if len(fullMatches) == 0 {
- return ErrBugNotExist
- }
-
- for _, ref := range fullMatches {
- err = repo.RemoveRef(ref)
- if err != nil {
- return err
- }
+ return nil, err
}
-
- return nil
+ return &Bug{Entity: e}, nil
}
type StreamedBug struct {
@@ -280,50 +65,33 @@ type StreamedBug struct {
Err error
}
-// ReadAllLocal read and parse all local bugs
-func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedBug {
- return readAll(repo, identity.NewSimpleResolver(repo), bugsRefPattern)
-}
-
-// ReadAllLocalWithResolver read and parse all local bugs
-func ReadAllLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug {
- return readAll(repo, identityResolver, bugsRefPattern)
-}
-
-// ReadAllRemote read and parse all remote bugs for a given remote
-func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedBug {
- refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
- return readAll(repo, identity.NewSimpleResolver(repo), refPrefix)
+// ReadAll reads and parses all local bugs
+func ReadAll(repo repository.ClockedRepo) <-chan StreamedBug {
+ return readAll(repo, identity.NewSimpleResolver(repo))
}
-// ReadAllRemoteWithResolver read and parse all remote bugs for a given remote
-func ReadAllRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string) <-chan StreamedBug {
- refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote)
- return readAll(repo, identityResolver, refPrefix)
+// ReadAllWithResolver reads and parses all local bugs
+func ReadAllWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug {
+ return readAll(repo, identityResolver)
}
// readAll reads and parses all available local bugs
-func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, refPrefix string) <-chan StreamedBug {
+func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug {
out := make(chan StreamedBug)
go func() {
defer close(out)
- refs, err := repo.ListRefs(refPrefix)
- if err != nil {
- out <- StreamedBug{Err: err}
- return
- }
-
- for _, ref := range refs {
- b, err := read(repo, identityResolver, ref)
-
- if err != nil {
- out <- StreamedBug{Err: err}
- return
+ for streamedEntity := range dag.ReadAll(def, repo, identityResolver) {
+ if streamedEntity.Err != nil {
+ out <- StreamedBug{
+ Err: streamedEntity.Err,
+ }
+ } else {
+ out <- StreamedBug{
+ Bug: &Bug{Entity: streamedEntity.Entity},
+ }
}
-
- out <- StreamedBug{Bug: b}
}
}()
@@ -332,399 +100,78 @@ func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, re
// ListLocalIds list all the available local bug ids
func ListLocalIds(repo repository.Repo) ([]entity.Id, error) {
- refs, err := repo.ListRefs(bugsRefPattern)
- if err != nil {
- return nil, err
- }
-
- return entity.RefsToIds(refs), nil
+ return dag.ListLocalIds(def, repo)
}
// Validate check if the Bug data is valid
func (bug *Bug) Validate() error {
- // non-empty
- if len(bug.packs) == 0 && bug.staging.IsEmpty() {
- return fmt.Errorf("bug has no operations")
- }
-
- // check if each pack and operations are valid
- for _, pack := range bug.packs {
- if err := pack.Validate(); err != nil {
- return err
- }
- }
-
- // check if staging is valid if needed
- if !bug.staging.IsEmpty() {
- if err := bug.staging.Validate(); err != nil {
- return errors.Wrap(err, "staging")
- }
+ if err := bug.Entity.Validate(); err != nil {
+ return err
}
// The very first Op should be a CreateOp
firstOp := bug.FirstOp()
- if firstOp == nil || firstOp.base().OperationType != CreateOp {
+ if firstOp == nil || firstOp.Type() != CreateOp {
return fmt.Errorf("first operation should be a Create op")
}
- // The bug Id should be the hash of the first commit
- if len(bug.packs) > 0 && string(bug.packs[0].commitHash) != bug.id.String() {
- return fmt.Errorf("bug id should be the first commit hash")
- }
-
// Check that there is no more CreateOp op
- // Check that there is no colliding operation's ID
- it := NewOperationIterator(bug)
- createCount := 0
- ids := make(map[entity.Id]struct{})
- for it.Next() {
- if it.Value().base().OperationType == CreateOp {
- createCount++
+ for i, op := range bug.Operations() {
+ if i == 0 {
+ continue
}
- if _, ok := ids[it.Value().Id()]; ok {
- return fmt.Errorf("id collision: %s", it.Value().Id())
+ if op.Type() == CreateOp {
+ return fmt.Errorf("only one Create op allowed")
}
- ids[it.Value().Id()] = struct{}{}
- }
-
- if createCount != 1 {
- return fmt.Errorf("only one Create op allowed")
}
return nil
}
-// Append an operation into the staging area, to be committed later
+// Append adds a new Operation to the Bug
func (bug *Bug) Append(op Operation) {
- bug.staging.Append(op)
+ bug.Entity.Append(op)
}
-// Commit write the staging area in Git and move the operations to the packs
-func (bug *Bug) Commit(repo repository.ClockedRepo) error {
-
- if !bug.NeedCommit() {
- return fmt.Errorf("can't commit a bug with no pending operation")
- }
-
- if err := bug.Validate(); err != nil {
- return errors.Wrap(err, "can't commit a bug with invalid data")
+// Operations returns the ordered operations
+func (bug *Bug) Operations() []Operation {
+ source := bug.Entity.Operations()
+ result := make([]Operation, len(source))
+ for i, op := range source {
+ result[i] = op.(Operation)
}
-
- // Write the Ops as a Git blob containing the serialized array
- hash, err := bug.staging.Write(repo)
- if err != nil {
- return err
- }
-
- if bug.rootPack == "" {
- bug.rootPack = hash
- }
-
- // Make a Git tree referencing this blob
- tree := []repository.TreeEntry{
- // the last pack of ops
- {ObjectType: repository.Blob, Hash: hash, Name: opsEntryName},
- // always the first pack of ops (might be the same)
- {ObjectType: repository.Blob, Hash: bug.rootPack, Name: rootEntryName},
- }
-
- // Reference, if any, all the files required by the ops
- // Git will check that they actually exist in the storage and will make sure
- // to push/pull them as needed.
- mediaTree := makeMediaTree(bug.staging)
- if len(mediaTree) > 0 {
- mediaTreeHash, err := repo.StoreTree(mediaTree)
- if err != nil {
- return err
- }
- tree = append(tree, repository.TreeEntry{
- ObjectType: repository.Tree,
- Hash: mediaTreeHash,
- Name: mediaEntryName,
- })
- }
-
- // Store the logical clocks as well
- // --> edit clock for each OperationPack/commits
- // --> create clock only for the first OperationPack/commits
- //
- // To avoid having one blob for each clock value, clocks are serialized
- // directly into the entry name
- emptyBlobHash, err := repo.StoreData([]byte{})
- if err != nil {
- return err
- }
-
- editClock, err := repo.GetOrCreateClock(editClockName)
- if err != nil {
- return err
- }
- bug.editTime, err = editClock.Increment()
- if err != nil {
- return err
- }
-
- tree = append(tree, repository.TreeEntry{
- ObjectType: repository.Blob,
- Hash: emptyBlobHash,
- Name: fmt.Sprintf(editClockEntryPattern, bug.editTime),
- })
- if bug.lastCommit == "" {
- createClock, err := repo.GetOrCreateClock(creationClockName)
- if err != nil {
- return err
- }
- bug.createTime, err = createClock.Increment()
- if err != nil {
- return err
- }
-
- tree = append(tree, repository.TreeEntry{
- ObjectType: repository.Blob,
- Hash: emptyBlobHash,
- Name: fmt.Sprintf(createClockEntryPattern, bug.createTime),
- })
- }
-
- // Store the tree
- hash, err = repo.StoreTree(tree)
- if err != nil {
- return err
- }
-
- // Write a Git commit referencing the tree, with the previous commit as parent
- if bug.lastCommit != "" {
- hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit)
- } else {
- hash, err = repo.StoreCommit(hash)
- }
-
- if err != nil {
- return err
- }
-
- bug.lastCommit = hash
-
- // if it was the first commit, use the commit hash as bug id
- if bug.id == "" {
- bug.id = entity.Id(hash)
- }
-
- // Create or update the Git reference for this bug
- // When pushing later, the remote will ensure that this ref update
- // is fast-forward, that is no data has been overwritten
- ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id)
- err = repo.UpdateRef(ref, hash)
-
- if err != nil {
- return err
- }
-
- bug.staging.commitHash = hash
- bug.packs = append(bug.packs, bug.staging)
- bug.staging = OperationPack{}
-
- return nil
-}
-
-func (bug *Bug) CommitAsNeeded(repo repository.ClockedRepo) error {
- if !bug.NeedCommit() {
- return nil
- }
- return bug.Commit(repo)
-}
-
-func (bug *Bug) NeedCommit() bool {
- return !bug.staging.IsEmpty()
+ return result
}
-func makeMediaTree(pack OperationPack) []repository.TreeEntry {
- var tree []repository.TreeEntry
- counter := 0
- added := make(map[repository.Hash]interface{})
-
- for _, ops := range pack.Operations {
- for _, file := range ops.GetFiles() {
- if _, has := added[file]; !has {
- tree = append(tree, repository.TreeEntry{
- ObjectType: repository.Blob,
- Hash: file,
- // The name is not important here, we only need to
- // reference the blob.
- Name: fmt.Sprintf("file%d", counter),
- })
- counter++
- added[file] = struct{}{}
- }
- }
- }
-
- return tree
-}
-
-// Merge a different version of the same bug by rebasing operations of this bug
-// that are not present in the other on top of the chain of operations of the
-// other version.
-func (bug *Bug) Merge(repo repository.Repo, other Interface) (bool, error) {
- var otherBug = bugFromInterface(other)
-
- // Note: a faster merge should be possible without actually reading and parsing
- // all operations pack of our side.
- // Reading the other side is still necessary to validate remote data, at least
- // for new operations
-
- if bug.id != otherBug.id {
- return false, errors.New("merging unrelated bugs is not supported")
- }
-
- if len(otherBug.staging.Operations) > 0 {
- return false, errors.New("merging a bug with a non-empty staging is not supported")
- }
-
- if bug.lastCommit == "" || otherBug.lastCommit == "" {
- return false, errors.New("can't merge a bug that has never been stored")
- }
-
- ancestor, err := repo.FindCommonAncestor(bug.lastCommit, otherBug.lastCommit)
- if err != nil {
- return false, errors.Wrap(err, "can't find common ancestor")
- }
-
- ancestorIndex := 0
- newPacks := make([]OperationPack, 0, len(bug.packs))
-
- // Find the root of the rebase
- for i, pack := range bug.packs {
- newPacks = append(newPacks, pack)
-
- if pack.commitHash == ancestor {
- ancestorIndex = i
- break
- }
- }
-
- if len(otherBug.packs) == ancestorIndex+1 {
- // Nothing to rebase, return early
- return false, nil
- }
-
- // get other bug's extra packs
- for i := ancestorIndex + 1; i < len(otherBug.packs); i++ {
- // clone is probably not necessary
- newPack := otherBug.packs[i].Clone()
-
- newPacks = append(newPacks, newPack)
- bug.lastCommit = newPack.commitHash
- }
-
- // rebase our extra packs
- for i := ancestorIndex + 1; i < len(bug.packs); i++ {
- pack := bug.packs[i]
-
- // get the referenced git tree
- treeHash, err := repo.GetTreeHash(pack.commitHash)
-
- if err != nil {
- return false, err
- }
-
- // create a new commit with the correct ancestor
- hash, err := repo.StoreCommitWithParent(treeHash, bug.lastCommit)
-
- if err != nil {
- return false, err
- }
-
- // replace the pack
- newPack := pack.Clone()
- newPack.commitHash = hash
- newPacks = append(newPacks, newPack)
-
- // update the bug
- bug.lastCommit = hash
- }
-
- bug.packs = newPacks
-
- // Update the git ref
- err = repo.UpdateRef(bugsRefPattern+bug.id.String(), bug.lastCommit)
- if err != nil {
- return false, err
+// Compile a bug into an easily usable snapshot
+func (bug *Bug) Compile() Snapshot {
+ snap := Snapshot{
+ id: bug.Id(),
+ Status: OpenStatus,
}
- return true, nil
-}
-
-// Id return the Bug identifier
-func (bug *Bug) Id() entity.Id {
- if bug.id == "" {
- // simply panic as it would be a coding error
- // (using an id of a bug not stored yet)
- panic("no id yet")
+ for _, op := range bug.Operations() {
+ op.Apply(&snap)
+ snap.Operations = append(snap.Operations, op)
}
- return bug.id
-}
-
-// CreateLamportTime return the Lamport time of creation
-func (bug *Bug) CreateLamportTime() lamport.Time {
- return bug.createTime
-}
-// EditLamportTime return the Lamport time of the last edit
-func (bug *Bug) EditLamportTime() lamport.Time {
- return bug.editTime
+ return snap
}
// Lookup for the very first operation of the bug.
// For a valid Bug, this operation should be a CreateOp
func (bug *Bug) FirstOp() Operation {
- for _, pack := range bug.packs {
- for _, op := range pack.Operations {
- return op
- }
- }
-
- if !bug.staging.IsEmpty() {
- return bug.staging.Operations[0]
+ if fo := bug.Entity.FirstOp(); fo != nil {
+ return fo.(Operation)
}
-
return nil
}
// Lookup for the very last operation of the bug.
// For a valid Bug, should never be nil
func (bug *Bug) LastOp() Operation {
- if !bug.staging.IsEmpty() {
- return bug.staging.Operations[len(bug.staging.Operations)-1]
- }
-
- if len(bug.packs) == 0 {
- return nil
- }
-
- lastPack := bug.packs[len(bug.packs)-1]
-
- if len(lastPack.Operations) == 0 {
- return nil
- }
-
- return lastPack.Operations[len(lastPack.Operations)-1]
-}
-
-// Compile a bug in a easily usable snapshot
-func (bug *Bug) Compile() Snapshot {
- snap := Snapshot{
- id: bug.id,
- Status: OpenStatus,
+ if lo := bug.Entity.LastOp(); lo != nil {
+ return lo.(Operation)
}
-
- it := NewOperationIterator(bug)
-
- for it.Next() {
- op := it.Value()
- op.Apply(&snap)
- snap.Operations = append(snap.Operations, op)
- }
-
- return snap
+ return nil
}
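
The bug.go rewrite above removes the hand-rolled git walking (read, readAll, Commit, Merge) in favor of the generic dag.Entity plumbing. Below is a minimal caller-side sketch, not part of this diff, using only the signatures visible above (Read, ReadAll, Operations, Compile):

package example

import (
	"fmt"

	"github.com/MichaelMure/git-bug/bug"
	"github.com/MichaelMure/git-bug/entity"
	"github.com/MichaelMure/git-bug/repository"
)

// printBug reads one bug through the new dag-backed Read and folds its
// ordered operations into a Snapshot.
func printBug(repo repository.ClockedRepo, id entity.Id) error {
	b, err := bug.Read(repo, id)
	if err != nil {
		return err
	}
	snap := b.Compile()
	fmt.Println(snap.Title, snap.Status)
	for _, op := range b.Operations() {
		fmt.Println(op.Id())
	}
	return nil
}

// listBugs drains the ReadAll stream; errors arrive on the same channel as
// the bugs themselves, wrapped in StreamedBug.
func listBugs(repo repository.ClockedRepo) ([]*bug.Bug, error) {
	var bugs []*bug.Bug
	for streamed := range bug.ReadAll(repo) {
		if streamed.Err != nil {
			return nil, streamed.Err
		}
		bugs = append(bugs, streamed.Bug)
	}
	return bugs, nil
}
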
diff --git a/bug/bug_actions.go b/bug/bug_actions.go
index 21ce3733..420fb08a 100644
--- a/bug/bug_actions.go
+++ b/bug/bug_actions.go
@@ -1,42 +1,34 @@
package bug
import (
- "fmt"
- "strings"
+ "github.com/pkg/errors"
"github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/entity/dag"
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
- "github.com/pkg/errors"
)
// Fetch retrieve updates from a remote
// This does not change the local bugs state
func Fetch(repo repository.Repo, remote string) (string, error) {
- // "refs/bugs/*:refs/remotes/<remote>>/bugs/*"
- remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote)
- fetchRefSpec := fmt.Sprintf("%s*:%s*", bugsRefPattern, remoteRefSpec)
-
- return repo.FetchRefs(remote, fetchRefSpec)
+ return dag.Fetch(def, repo, remote)
}
// Push update a remote with the local changes
func Push(repo repository.Repo, remote string) (string, error) {
- // "refs/bugs/*:refs/bugs/*"
- refspec := fmt.Sprintf("%s*:%s*", bugsRefPattern, bugsRefPattern)
-
- return repo.PushRefs(remote, refspec)
+ return dag.Push(def, repo, remote)
}
// Pull will do a Fetch + MergeAll
// This function will return an error if a merge fail
-func Pull(repo repository.ClockedRepo, remote string) error {
+func Pull(repo repository.ClockedRepo, remote string, author identity.Interface) error {
_, err := Fetch(repo, remote)
if err != nil {
return err
}
- for merge := range MergeAll(repo, remote) {
+ for merge := range MergeAll(repo, remote, author) {
if merge.Err != nil {
return merge.Err
}
@@ -48,96 +40,38 @@ func Pull(repo repository.ClockedRepo, remote string) error {
return nil
}
-// MergeAll will merge all the available remote bug:
-//
-// - If the remote has new commit, the local bug is updated to match the same history
-// (fast-forward update)
-// - if the local bug has new commits but the remote don't, nothing is changed
-// - if both local and remote bug have new commits (that is, we have a concurrent edition),
-// new local commits are rewritten at the head of the remote history (that is, a rebase)
-func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeResult {
- out := make(chan entity.MergeResult)
-
+// MergeAll will merge all the available remote bugs
+// Note: an author is necessary for the case where a merge commit is created, as this commit will
+// have an author and may be signed if a signing key is available.
+func MergeAll(repo repository.ClockedRepo, remote string, author identity.Interface) <-chan entity.MergeResult {
// no caching for the merge, we load everything from git even if that means multiple
// copy of the same entity in memory. The cache layer will intercept the results to
// invalidate entities if necessary.
identityResolver := identity.NewSimpleResolver(repo)
+ out := make(chan entity.MergeResult)
+
go func() {
defer close(out)
- remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote)
- remoteRefs, err := repo.ListRefs(remoteRefSpec)
+ results := dag.MergeAll(def, repo, identityResolver, remote, author)
- if err != nil {
- out <- entity.MergeResult{Err: err}
- return
- }
-
- for _, remoteRef := range remoteRefs {
- refSplit := strings.Split(remoteRef, "/")
- id := entity.Id(refSplit[len(refSplit)-1])
-
- if err := id.Validate(); err != nil {
- out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error())
- continue
- }
-
- remoteBug, err := read(repo, identityResolver, remoteRef)
-
- if err != nil {
- out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is not readable").Error())
- continue
- }
-
- // Check for error in remote data
- if err := remoteBug.Validate(); err != nil {
- out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is invalid").Error())
- continue
- }
-
- localRef := bugsRefPattern + remoteBug.Id().String()
- localExist, err := repo.RefExist(localRef)
-
- if err != nil {
- out <- entity.NewMergeError(err, id)
- continue
- }
-
- // the bug is not local yet, simply create the reference
- if !localExist {
- err := repo.CopyRef(remoteRef, localRef)
-
- if err != nil {
- out <- entity.NewMergeError(err, id)
- return
+ // wrap the dag.Entity into a complete Bug
+ for result := range results {
+ result := result
+ if result.Entity != nil {
+ result.Entity = &Bug{
+ Entity: result.Entity.(*dag.Entity),
}
-
- out <- entity.NewMergeStatus(entity.MergeStatusNew, id, remoteBug)
- continue
- }
-
- localBug, err := read(repo, identityResolver, localRef)
-
- if err != nil {
- out <- entity.NewMergeError(errors.Wrap(err, "local bug is not readable"), id)
- return
- }
-
- updated, err := localBug.Merge(repo, remoteBug)
-
- if err != nil {
- out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "merge failed").Error())
- return
- }
-
- if updated {
- out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localBug)
- } else {
- out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localBug)
}
+ out <- result
}
}()
return out
}
+
+// RemoveBug will remove a local bug from its entity.Id
+func RemoveBug(repo repository.ClockedRepo, id entity.Id) error {
+ return dag.Remove(def, repo, id)
+}
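
With bug_actions.go now delegating to dag.Fetch/Push/MergeAll, the visible change for callers is that Pull and MergeAll take an author, used only if a merge commit has to be created. A hedged usage sketch, assuming nothing beyond the signatures shown above:

package example

import (
	"github.com/MichaelMure/git-bug/bug"
	"github.com/MichaelMure/git-bug/identity"
	"github.com/MichaelMure/git-bug/repository"
)

// syncBugs pushes local bugs, then pulls the remote ones. The author is only
// needed by Pull/MergeAll in case a merge commit gets created.
func syncBugs(repo repository.ClockedRepo, author identity.Interface) error {
	if _, err := bug.Push(repo, "origin"); err != nil {
		return err
	}
	return bug.Pull(repo, "origin", author)
}
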
diff --git a/bug/bug_actions_test.go b/bug/bug_actions_test.go
deleted file mode 100644
index df35a5e5..00000000
--- a/bug/bug_actions_test.go
+++ /dev/null
@@ -1,390 +0,0 @@
-package bug
-
-import (
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-
- "github.com/MichaelMure/git-bug/identity"
- "github.com/MichaelMure/git-bug/repository"
-)
-
-func TestPushPull(t *testing.T) {
- repoA, repoB, remote := repository.SetupReposAndRemote()
- defer repository.CleanupTestRepos(repoA, repoB, remote)
-
- reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := reneA.Commit(repoA)
- require.NoError(t, err)
-
- bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message")
- require.NoError(t, err)
- assert.True(t, bug1.NeedCommit())
- err = bug1.Commit(repoA)
- require.NoError(t, err)
- assert.False(t, bug1.NeedCommit())
-
- // distribute the identity
- _, err = identity.Push(repoA, "origin")
- require.NoError(t, err)
- err = identity.Pull(repoB, "origin")
- require.NoError(t, err)
-
- // A --> remote --> B
- _, err = Push(repoA, "origin")
- require.NoError(t, err)
-
- err = Pull(repoB, "origin")
- require.NoError(t, err)
-
- bugs := allBugs(t, ReadAllLocal(repoB))
-
- if len(bugs) != 1 {
- t.Fatal("Unexpected number of bugs")
- }
-
- // B --> remote --> A
- reneB, err := identity.ReadLocal(repoA, reneA.Id())
- require.NoError(t, err)
-
- bug2, _, err := Create(reneB, time.Now().Unix(), "bug2", "message")
- require.NoError(t, err)
- err = bug2.Commit(repoB)
- require.NoError(t, err)
-
- _, err = Push(repoB, "origin")
- require.NoError(t, err)
-
- err = Pull(repoA, "origin")
- require.NoError(t, err)
-
- bugs = allBugs(t, ReadAllLocal(repoA))
-
- if len(bugs) != 2 {
- t.Fatal("Unexpected number of bugs")
- }
-}
-
-func allBugs(t testing.TB, bugs <-chan StreamedBug) []*Bug {
- var result []*Bug
- for streamed := range bugs {
- if streamed.Err != nil {
- t.Fatal(streamed.Err)
- }
- result = append(result, streamed.Bug)
- }
- return result
-}
-
-func TestRebaseTheirs(t *testing.T) {
- _RebaseTheirs(t)
-}
-
-func BenchmarkRebaseTheirs(b *testing.B) {
- for n := 0; n < b.N; n++ {
- _RebaseTheirs(b)
- }
-}
-
-func _RebaseTheirs(t testing.TB) {
- repoA, repoB, remote := repository.SetupReposAndRemote()
- defer repository.CleanupTestRepos(repoA, repoB, remote)
-
- reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := reneA.Commit(repoA)
- require.NoError(t, err)
-
- bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message")
- require.NoError(t, err)
- assert.True(t, bug1.NeedCommit())
- err = bug1.Commit(repoA)
- require.NoError(t, err)
- assert.False(t, bug1.NeedCommit())
-
- // distribute the identity
- _, err = identity.Push(repoA, "origin")
- require.NoError(t, err)
- err = identity.Pull(repoB, "origin")
- require.NoError(t, err)
-
- // A --> remote
-
- _, err = Push(repoA, "origin")
- require.NoError(t, err)
-
- // remote --> B
- err = Pull(repoB, "origin")
- require.NoError(t, err)
-
- bug2, err := ReadLocal(repoB, bug1.Id())
- require.NoError(t, err)
- assert.False(t, bug2.NeedCommit())
-
- reneB, err := identity.ReadLocal(repoA, reneA.Id())
- require.NoError(t, err)
-
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message2")
- require.NoError(t, err)
- assert.True(t, bug2.NeedCommit())
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message3")
- require.NoError(t, err)
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message4")
- require.NoError(t, err)
- err = bug2.Commit(repoB)
- require.NoError(t, err)
- assert.False(t, bug2.NeedCommit())
-
- // B --> remote
- _, err = Push(repoB, "origin")
- require.NoError(t, err)
-
- // remote --> A
- err = Pull(repoA, "origin")
- require.NoError(t, err)
-
- bugs := allBugs(t, ReadAllLocal(repoB))
-
- if len(bugs) != 1 {
- t.Fatal("Unexpected number of bugs")
- }
-
- bug3, err := ReadLocal(repoA, bug1.Id())
- require.NoError(t, err)
-
- if nbOps(bug3) != 4 {
- t.Fatal("Unexpected number of operations")
- }
-}
-
-func TestRebaseOurs(t *testing.T) {
- _RebaseOurs(t)
-}
-
-func BenchmarkRebaseOurs(b *testing.B) {
- for n := 0; n < b.N; n++ {
- _RebaseOurs(b)
- }
-}
-
-func _RebaseOurs(t testing.TB) {
- repoA, repoB, remote := repository.SetupReposAndRemote()
- defer repository.CleanupTestRepos(repoA, repoB, remote)
-
- reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := reneA.Commit(repoA)
- require.NoError(t, err)
-
- bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message")
- require.NoError(t, err)
- err = bug1.Commit(repoA)
- require.NoError(t, err)
-
- // distribute the identity
- _, err = identity.Push(repoA, "origin")
- require.NoError(t, err)
- err = identity.Pull(repoB, "origin")
- require.NoError(t, err)
-
- // A --> remote
- _, err = Push(repoA, "origin")
- require.NoError(t, err)
-
- // remote --> B
- err = Pull(repoB, "origin")
- require.NoError(t, err)
-
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4")
- require.NoError(t, err)
- err = bug1.Commit(repoA)
- require.NoError(t, err)
-
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7")
- require.NoError(t, err)
- err = bug1.Commit(repoA)
- require.NoError(t, err)
-
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message10")
- require.NoError(t, err)
- err = bug1.Commit(repoA)
- require.NoError(t, err)
-
- // remote --> A
- err = Pull(repoA, "origin")
- require.NoError(t, err)
-
- bugs := allBugs(t, ReadAllLocal(repoA))
-
- if len(bugs) != 1 {
- t.Fatal("Unexpected number of bugs")
- }
-
- bug2, err := ReadLocal(repoA, bug1.Id())
- require.NoError(t, err)
-
- if nbOps(bug2) != 10 {
- t.Fatal("Unexpected number of operations")
- }
-}
-
-func nbOps(b *Bug) int {
- it := NewOperationIterator(b)
- counter := 0
- for it.Next() {
- counter++
- }
- return counter
-}
-
-func TestRebaseConflict(t *testing.T) {
- _RebaseConflict(t)
-}
-
-func BenchmarkRebaseConflict(b *testing.B) {
- for n := 0; n < b.N; n++ {
- _RebaseConflict(b)
- }
-}
-
-func _RebaseConflict(t testing.TB) {
- repoA, repoB, remote := repository.SetupReposAndRemote()
- defer repository.CleanupTestRepos(repoA, repoB, remote)
-
- reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := reneA.Commit(repoA)
- require.NoError(t, err)
-
- bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message")
- require.NoError(t, err)
- err = bug1.Commit(repoA)
- require.NoError(t, err)
-
- // distribute the identity
- _, err = identity.Push(repoA, "origin")
- require.NoError(t, err)
- err = identity.Pull(repoB, "origin")
- require.NoError(t, err)
-
- // A --> remote
- _, err = Push(repoA, "origin")
- require.NoError(t, err)
-
- // remote --> B
- err = Pull(repoB, "origin")
- require.NoError(t, err)
-
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4")
- require.NoError(t, err)
- err = bug1.Commit(repoA)
- require.NoError(t, err)
-
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7")
- require.NoError(t, err)
- err = bug1.Commit(repoA)
- require.NoError(t, err)
-
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9")
- require.NoError(t, err)
- _, err = AddComment(bug1, reneA, time.Now().Unix(), "message10")
- require.NoError(t, err)
- err = bug1.Commit(repoA)
- require.NoError(t, err)
-
- bug2, err := ReadLocal(repoB, bug1.Id())
- require.NoError(t, err)
-
- reneB, err := identity.ReadLocal(repoA, reneA.Id())
- require.NoError(t, err)
-
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message11")
- require.NoError(t, err)
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message12")
- require.NoError(t, err)
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message13")
- require.NoError(t, err)
- err = bug2.Commit(repoB)
- require.NoError(t, err)
-
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message14")
- require.NoError(t, err)
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message15")
- require.NoError(t, err)
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message16")
- require.NoError(t, err)
- err = bug2.Commit(repoB)
- require.NoError(t, err)
-
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message17")
- require.NoError(t, err)
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message18")
- require.NoError(t, err)
- _, err = AddComment(bug2, reneB, time.Now().Unix(), "message19")
- require.NoError(t, err)
- err = bug2.Commit(repoB)
- require.NoError(t, err)
-
- // A --> remote
- _, err = Push(repoA, "origin")
- require.NoError(t, err)
-
- // remote --> B
- err = Pull(repoB, "origin")
- require.NoError(t, err)
-
- bugs := allBugs(t, ReadAllLocal(repoB))
-
- if len(bugs) != 1 {
- t.Fatal("Unexpected number of bugs")
- }
-
- bug3, err := ReadLocal(repoB, bug1.Id())
- require.NoError(t, err)
-
- if nbOps(bug3) != 19 {
- t.Fatal("Unexpected number of operations")
- }
-
- // B --> remote
- _, err = Push(repoB, "origin")
- require.NoError(t, err)
-
- // remote --> A
- err = Pull(repoA, "origin")
- require.NoError(t, err)
-
- bugs = allBugs(t, ReadAllLocal(repoA))
-
- if len(bugs) != 1 {
- t.Fatal("Unexpected number of bugs")
- }
-
- bug4, err := ReadLocal(repoA, bug1.Id())
- require.NoError(t, err)
-
- if nbOps(bug4) != 19 {
- t.Fatal("Unexpected number of operations")
- }
-}
diff --git a/bug/bug_test.go b/bug/bug_test.go
deleted file mode 100644
index d6ef6fa1..00000000
--- a/bug/bug_test.go
+++ /dev/null
@@ -1,183 +0,0 @@
-package bug
-
-import (
- "fmt"
- "testing"
- "time"
-
- "github.com/stretchr/testify/require"
-
- "github.com/MichaelMure/git-bug/identity"
- "github.com/MichaelMure/git-bug/repository"
-)
-
-func TestBugId(t *testing.T) {
- mockRepo := repository.NewMockRepoForTest()
-
- bug1 := NewBug()
-
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(mockRepo)
- require.NoError(t, err)
-
- createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
-
- bug1.Append(createOp)
-
- err = bug1.Commit(mockRepo)
- require.NoError(t, err)
-
- bug1.Id()
-}
-
-func TestBugValidity(t *testing.T) {
- mockRepo := repository.NewMockRepoForTest()
-
- bug1 := NewBug()
-
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(mockRepo)
- require.NoError(t, err)
-
- createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
-
- if bug1.Validate() == nil {
- t.Fatal("Empty bug should be invalid")
- }
-
- bug1.Append(createOp)
-
- if bug1.Validate() != nil {
- t.Fatal("Bug with just a CreateOp should be valid")
- }
-
- err = bug1.Commit(mockRepo)
- if err != nil {
- t.Fatal(err)
- }
-
- bug1.Append(createOp)
-
- if bug1.Validate() == nil {
- t.Fatal("Bug with multiple CreateOp should be invalid")
- }
-
- err = bug1.Commit(mockRepo)
- if err == nil {
- t.Fatal("Invalid bug should not commit")
- }
-}
-
-func TestBugCommitLoad(t *testing.T) {
- repo := repository.NewMockRepoForTest()
-
- bug1 := NewBug()
-
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
- require.NoError(t, err)
-
- createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
- setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1")
- addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil)
-
- bug1.Append(createOp)
- bug1.Append(setTitleOp)
-
- require.True(t, bug1.NeedCommit())
-
- err = bug1.Commit(repo)
- require.Nil(t, err)
- require.False(t, bug1.NeedCommit())
-
- bug2, err := ReadLocal(repo, bug1.Id())
- require.NoError(t, err)
- equivalentBug(t, bug1, bug2)
-
- // add more op
-
- bug1.Append(addCommentOp)
-
- require.True(t, bug1.NeedCommit())
-
- err = bug1.Commit(repo)
- require.Nil(t, err)
- require.False(t, bug1.NeedCommit())
-
- bug3, err := ReadLocal(repo, bug1.Id())
- require.NoError(t, err)
- equivalentBug(t, bug1, bug3)
-}
-
-func equivalentBug(t *testing.T, expected, actual *Bug) {
- require.Equal(t, len(expected.packs), len(actual.packs))
-
- for i := range expected.packs {
- for j := range expected.packs[i].Operations {
- actual.packs[i].Operations[j].base().id = expected.packs[i].Operations[j].base().id
- }
- }
-
- require.Equal(t, expected, actual)
-}
-
-func TestBugRemove(t *testing.T) {
- repo := repository.CreateGoGitTestRepo(false)
- remoteA := repository.CreateGoGitTestRepo(true)
- remoteB := repository.CreateGoGitTestRepo(true)
- defer repository.CleanupTestRepos(repo, remoteA, remoteB)
-
- err := repo.AddRemote("remoteA", remoteA.GetLocalRemote())
- require.NoError(t, err)
-
- err = repo.AddRemote("remoteB", remoteB.GetLocalRemote())
- require.NoError(t, err)
-
- // generate a bunch of bugs
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err = rene.Commit(repo)
- require.NoError(t, err)
-
- for i := 0; i < 100; i++ {
- b := NewBug()
- createOp := NewCreateOp(rene, time.Now().Unix(), "title", fmt.Sprintf("message%v", i), nil)
- b.Append(createOp)
- err = b.Commit(repo)
- require.NoError(t, err)
- }
-
- // and one more for testing
- b := NewBug()
- createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
- b.Append(createOp)
- err = b.Commit(repo)
- require.NoError(t, err)
-
- _, err = Push(repo, "remoteA")
- require.NoError(t, err)
-
- _, err = Push(repo, "remoteB")
- require.NoError(t, err)
-
- _, err = Fetch(repo, "remoteA")
- require.NoError(t, err)
-
- _, err = Fetch(repo, "remoteB")
- require.NoError(t, err)
-
- err = RemoveBug(repo, b.Id())
- require.NoError(t, err)
-
- _, err = ReadLocal(repo, b.Id())
- require.Error(t, ErrBugNotExist, err)
-
- _, err = ReadRemote(repo, "remoteA", b.Id())
- require.Error(t, ErrBugNotExist, err)
-
- _, err = ReadRemote(repo, "remoteB", b.Id())
- require.Error(t, ErrBugNotExist, err)
-
- ids, err := ListLocalIds(repo)
- require.NoError(t, err)
- require.Len(t, ids, 100)
-}
diff --git a/bug/clocks.go b/bug/clocks.go
deleted file mode 100644
index 58fce923..00000000
--- a/bug/clocks.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package bug
-
-import (
- "github.com/MichaelMure/git-bug/identity"
- "github.com/MichaelMure/git-bug/repository"
-)
-
-// ClockLoader is the repository.ClockLoader for the Bug entity
-var ClockLoader = repository.ClockLoader{
- Clocks: []string{creationClockName, editClockName},
- Witnesser: func(repo repository.ClockedRepo) error {
- // We don't care about the actual identity so an IdentityStub will do
- resolver := identity.NewStubResolver()
- for b := range ReadAllLocalWithResolver(repo, resolver) {
- if b.Err != nil {
- return b.Err
- }
-
- createClock, err := repo.GetOrCreateClock(creationClockName)
- if err != nil {
- return err
- }
- err = createClock.Witness(b.Bug.createTime)
- if err != nil {
- return err
- }
-
- editClock, err := repo.GetOrCreateClock(editClockName)
- if err != nil {
- return err
- }
- err = editClock.Witness(b.Bug.editTime)
- if err != nil {
- return err
- }
- }
-
- return nil
- },
-}
diff --git a/bug/err.go b/bug/err.go
new file mode 100644
index 00000000..1bd174bb
--- /dev/null
+++ b/bug/err.go
@@ -0,0 +1,17 @@
+package bug
+
+import (
+ "errors"
+
+ "github.com/MichaelMure/git-bug/entity"
+)
+
+var ErrBugNotExist = errors.New("bug doesn't exist")
+
+func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch {
+ return entity.NewErrMultipleMatch("bug", matching)
+}
+
+func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch {
+ return entity.NewErrMultipleMatch("operation", matching)
+}
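
err.go keeps the package-level sentinel and the multiple-match helpers that previously lived in bug.go. A hypothetical caller-side check follows; whether the new dag-based Read surfaces exactly ErrBugNotExist is an assumption here, not something this diff shows:

package example

import (
	"errors"

	"github.com/MichaelMure/git-bug/bug"
	"github.com/MichaelMure/git-bug/entity"
	"github.com/MichaelMure/git-bug/repository"
)

// bugExists reports whether a bug can be read locally, treating the
// ErrBugNotExist sentinel as "not found" rather than as a failure.
func bugExists(repo repository.ClockedRepo, id entity.Id) (bool, error) {
	_, err := bug.Read(repo, id)
	switch {
	case errors.Is(err, bug.ErrBugNotExist):
		return false, nil
	case err != nil:
		return false, err
	}
	return true, nil
}
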
diff --git a/bug/identity.go b/bug/identity.go
deleted file mode 100644
index 2eb2bcaf..00000000
--- a/bug/identity.go
+++ /dev/null
@@ -1,27 +0,0 @@
-package bug
-
-import (
- "github.com/MichaelMure/git-bug/identity"
-)
-
-// EnsureIdentities walk the graph of operations and make sure that all Identity
-// are properly loaded. That is, it replace all the IdentityStub with the full
-// Identity, loaded through a Resolver.
-func (bug *Bug) EnsureIdentities(resolver identity.Resolver) error {
- it := NewOperationIterator(bug)
-
- for it.Next() {
- op := it.Value()
- base := op.base()
-
- if stub, ok := base.Author.(*identity.IdentityStub); ok {
- i, err := resolver.ResolveIdentity(stub.Id())
- if err != nil {
- return err
- }
-
- base.Author = i
- }
- }
- return nil
-}
diff --git a/bug/interface.go b/bug/interface.go
index 5c8f2729..e71496a9 100644
--- a/bug/interface.go
+++ b/bug/interface.go
@@ -16,17 +16,15 @@ type Interface interface {
// Append an operation into the staging area, to be committed later
Append(op Operation)
+ // Operations return the ordered operations
+ Operations() []Operation
+
// Indicate that the in-memory state changed and need to be commit in the repository
NeedCommit() bool
// Commit write the staging area in Git and move the operations to the packs
Commit(repo repository.ClockedRepo) error
- // Merge a different version of the same bug by rebasing operations of this bug
- // that are not present in the other on top of the chain of operations of the
- // other version.
- Merge(repo repository.Repo, other Interface) (bool, error)
-
// Lookup for the very first operation of the bug.
// For a valid Bug, this operation should be a CreateOp
FirstOp() Operation
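
interface.go swaps the old Merge method for an Operations() accessor, so code that used the now-deleted operation iterator can range over a slice instead. A small sketch against the updated Interface, assuming only what the diff shows plus the exported AddCommentOperation type:

package example

import "github.com/MichaelMure/git-bug/bug"

// countComments counts AddComment operations by ranging over Operations(),
// which replaces the removed NewOperationIterator.
func countComments(b bug.Interface) int {
	count := 0
	for _, op := range b.Operations() {
		if _, ok := op.(*bug.AddCommentOperation); ok {
			count++
		}
	}
	return count
}
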
diff --git a/bug/op_add_comment.go b/bug/op_add_comment.go
index 3f19e42e..f835866b 100644
--- a/bug/op_add_comment.go
+++ b/bug/op_add_comment.go
@@ -5,6 +5,7 @@ import (
"fmt"
"github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/entity/dag"
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
"github.com/MichaelMure/git-bug/util/text"
@@ -12,6 +13,7 @@ import (
)
var _ Operation = &AddCommentOperation{}
+var _ dag.OperationWithFiles = &AddCommentOperation{}
// AddCommentOperation will add a new comment in the bug
type AddCommentOperation struct {
@@ -21,25 +23,19 @@ type AddCommentOperation struct {
Files []repository.Hash `json:"files"`
}
-// Sign-post method for gqlgen
-func (op *AddCommentOperation) IsOperation() {}
-
-func (op *AddCommentOperation) base() *OpBase {
- return &op.OpBase
-}
-
func (op *AddCommentOperation) Id() entity.Id {
- return idOperation(op)
+ return idOperation(op, &op.OpBase)
}
func (op *AddCommentOperation) Apply(snapshot *Snapshot) {
- snapshot.addActor(op.Author)
- snapshot.addParticipant(op.Author)
+ snapshot.addActor(op.Author_)
+ snapshot.addParticipant(op.Author_)
+ commentId := entity.CombineIds(snapshot.Id(), op.Id())
comment := Comment{
- id: op.Id(),
+ id: commentId,
Message: op.Message,
- Author: op.Author,
+ Author: op.Author_,
Files: op.Files,
UnixTime: timestamp.Timestamp(op.UnixTime),
}
@@ -47,7 +43,7 @@ func (op *AddCommentOperation) Apply(snapshot *Snapshot) {
snapshot.Comments = append(snapshot.Comments, comment)
item := &AddCommentTimelineItem{
- CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment),
+ CommentTimelineItem: NewCommentTimelineItem(commentId, comment),
}
snapshot.Timeline = append(snapshot.Timeline, item)
@@ -58,7 +54,7 @@ func (op *AddCommentOperation) GetFiles() []repository.Hash {
}
func (op *AddCommentOperation) Validate() error {
- if err := opBaseValidate(op, AddCommentOp); err != nil {
+ if err := op.OpBase.Validate(op, AddCommentOp); err != nil {
return err
}
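
The notable change in Apply above is that a comment id is no longer the raw operation id but the combination of the bug id and the operation id, via entity.CombineIds, so the same comment stays addressable relative to its bug. A one-line sketch of that scheme (function name is illustrative):

package example

import "github.com/MichaelMure/git-bug/entity"

// commentId mirrors the scheme used in Apply: a comment is addressed by the
// bug (snapshot) id combined with the id of the operation that created it.
func commentId(bugId, opId entity.Id) entity.Id {
	return entity.CombineIds(bugId, opId)
}
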
diff --git a/bug/op_add_comment_test.go b/bug/op_add_comment_test.go
index 8bcc64e1..fb6fa8ed 100644
--- a/bug/op_add_comment_test.go
+++ b/bug/op_add_comment_test.go
@@ -13,9 +13,9 @@ import (
)
func TestAddCommentSerialize(t *testing.T) {
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
@@ -32,8 +32,8 @@ func TestAddCommentSerialize(t *testing.T) {
before.Id()
// Replace the identity stub with the real thing
- assert.Equal(t, rene.Id(), after.base().Author.Id())
- after.Author = rene
+ assert.Equal(t, rene.Id(), after.Author().Id())
+ after.Author_ = rene
assert.Equal(t, before, &after)
}
diff --git a/bug/op_create.go b/bug/op_create.go
index 9bb40d35..75b60bd8 100644
--- a/bug/op_create.go
+++ b/bug/op_create.go
@@ -6,6 +6,7 @@ import (
"strings"
"github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/entity/dag"
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
"github.com/MichaelMure/git-bug/util/text"
@@ -13,6 +14,7 @@ import (
)
var _ Operation = &CreateOperation{}
+var _ dag.OperationWithFiles = &CreateOperation{}
// CreateOperation define the initial creation of a bug
type CreateOperation struct {
@@ -22,37 +24,53 @@ type CreateOperation struct {
Files []repository.Hash `json:"files"`
}
-// Sign-post method for gqlgen
-func (op *CreateOperation) IsOperation() {}
-
-func (op *CreateOperation) base() *OpBase {
- return &op.OpBase
+func (op *CreateOperation) Id() entity.Id {
+ return idOperation(op, &op.OpBase)
}
-func (op *CreateOperation) Id() entity.Id {
- return idOperation(op)
+// OVERRIDE
+func (op *CreateOperation) SetMetadata(key string, value string) {
+ // sanity check: we make sure we are not in the following scenario:
+ // - the bug is created with a first operation
+ // - Id() is used
+ // - metadata are added, which will change the Id
+ // - Id() is used again
+
+ if op.id != entity.UnsetId {
+ panic("usage of Id() after changing the first operation")
+ }
+
+ op.OpBase.SetMetadata(key, value)
}
func (op *CreateOperation) Apply(snapshot *Snapshot) {
- snapshot.addActor(op.Author)
- snapshot.addParticipant(op.Author)
+ // sanity check: will fail when adding a second Create
+ if snapshot.id != "" && snapshot.id != entity.UnsetId && snapshot.id != op.Id() {
+ panic("adding a second Create operation")
+ }
+
+ snapshot.id = op.Id()
+
+ snapshot.addActor(op.Author_)
+ snapshot.addParticipant(op.Author_)
snapshot.Title = op.Title
+ commentId := entity.CombineIds(snapshot.Id(), op.Id())
comment := Comment{
- id: op.Id(),
+ id: commentId,
Message: op.Message,
- Author: op.Author,
+ Author: op.Author_,
UnixTime: timestamp.Timestamp(op.UnixTime),
}
snapshot.Comments = []Comment{comment}
- snapshot.Author = op.Author
+ snapshot.Author = op.Author_
snapshot.CreateTime = op.Time()
snapshot.Timeline = []TimelineItem{
&CreateTimelineItem{
- CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment),
+ CommentTimelineItem: NewCommentTimelineItem(commentId, comment),
},
}
}
@@ -62,18 +80,23 @@ func (op *CreateOperation) GetFiles() []repository.Hash {
}
func (op *CreateOperation) Validate() error {
- if err := opBaseValidate(op, CreateOp); err != nil {
+ if err := op.OpBase.Validate(op, CreateOp); err != nil {
return err
}
+ if len(op.Nonce) > 64 {
+ return fmt.Errorf("create nonce is too big")
+ }
+ if len(op.Nonce) < 20 {
+ return fmt.Errorf("create nonce is too small")
+ }
+
if text.Empty(op.Title) {
return fmt.Errorf("title is empty")
}
-
if strings.Contains(op.Title, "\n") {
return fmt.Errorf("title should be a single line")
}
-
if !text.Safe(op.Title) {
return fmt.Errorf("title is not fully printable")
}
@@ -85,7 +108,7 @@ func (op *CreateOperation) Validate() error {
return nil
}
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
// This workaround is necessary to avoid the inner OpBase.MarshalJSON
// overriding the outer op's MarshalJSON
func (op *CreateOperation) UnmarshalJSON(data []byte) error {
@@ -98,6 +121,7 @@ func (op *CreateOperation) UnmarshalJSON(data []byte) error {
}
aux := struct {
+ Nonce []byte `json:"nonce"`
Title string `json:"title"`
Message string `json:"message"`
Files []repository.Hash `json:"files"`
@@ -109,6 +133,7 @@ func (op *CreateOperation) UnmarshalJSON(data []byte) error {
}
op.OpBase = base
+ op.Nonce = aux.Nonce
op.Title = aux.Title
op.Message = aux.Message
op.Files = aux.Files
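
op_create.go now carries a Nonce, validated above to be between 20 and 64 bytes, which appears intended to keep two bugs with identical content from colliding now that ids derive from the serialized create operation (format version 3 and up). A minimal creation sketch, assuming NewCreateOp and Bug.Commit keep the signatures visible in the new tests and in bug.Interface:

package example

import (
	"time"

	"github.com/MichaelMure/git-bug/bug"
	"github.com/MichaelMure/git-bug/identity"
	"github.com/MichaelMure/git-bug/repository"
)

// newBug creates a bug in memory with a single Create operation and commits
// it; the nonce is generated by NewCreateOp, not supplied by the caller.
func newBug(repo repository.ClockedRepo, author identity.Interface) (*bug.Bug, error) {
	b := bug.NewBug()
	b.Append(bug.NewCreateOp(author, time.Now().Unix(), "title", "message", nil))
	if err := b.Validate(); err != nil {
		return nil, err
	}
	if err := b.Commit(repo); err != nil {
		return nil, err
	}
	return b, nil
}
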
diff --git a/bug/op_create_test.go b/bug/op_create_test.go
index f68b7637..1b359dee 100644
--- a/bug/op_create_test.go
+++ b/bug/op_create_test.go
@@ -5,17 +5,22 @@ import (
"testing"
"time"
+ "github.com/stretchr/testify/require"
+
+ "github.com/MichaelMure/git-bug/entity"
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
"github.com/MichaelMure/git-bug/util/timestamp"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
)
func TestCreate(t *testing.T) {
snapshot := Snapshot{}
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
+ repo := repository.NewMockRepoClock()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
+ require.NoError(t, err)
+
unix := time.Now().Unix()
create := NewCreateOp(rene, unix, "title", "message", nil)
@@ -23,16 +28,19 @@ func TestCreate(t *testing.T) {
create.Apply(&snapshot)
id := create.Id()
- assert.NoError(t, id.Validate())
+ require.NoError(t, id.Validate())
+
+ commentId := entity.CombineIds(create.Id(), create.Id())
comment := Comment{
- id: id,
+ id: commentId,
Author: rene,
Message: "message",
UnixTime: timestamp.Timestamp(create.UnixTime),
}
expected := Snapshot{
+ id: create.Id(),
Title: "title",
Comments: []Comment{
comment,
@@ -43,36 +51,36 @@ func TestCreate(t *testing.T) {
CreateTime: create.Time(),
Timeline: []TimelineItem{
&CreateTimelineItem{
- CommentTimelineItem: NewCommentTimelineItem(id, comment),
+ CommentTimelineItem: NewCommentTimelineItem(commentId, comment),
},
},
}
- assert.Equal(t, expected, snapshot)
+ require.Equal(t, expected, snapshot)
}
func TestCreateSerialize(t *testing.T) {
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
before := NewCreateOp(rene, unix, "title", "message", nil)
data, err := json.Marshal(before)
- assert.NoError(t, err)
+ require.NoError(t, err)
var after CreateOperation
err = json.Unmarshal(data, &after)
- assert.NoError(t, err)
+ require.NoError(t, err)
// enforce creating the ID
before.Id()
// Replace the identity stub with the real thing
- assert.Equal(t, rene.Id(), after.base().Author.Id())
- after.Author = rene
+ require.Equal(t, rene.Id(), after.Author().Id())
+ after.Author_ = rene
- assert.Equal(t, before, &after)
+ require.Equal(t, before, &after)
}
diff --git a/bug/op_edit_comment.go b/bug/op_edit_comment.go
index 5bfc36bf..3e6634e4 100644
--- a/bug/op_edit_comment.go
+++ b/bug/op_edit_comment.go
@@ -7,6 +7,7 @@ import (
"github.com/pkg/errors"
"github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/entity/dag"
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
"github.com/MichaelMure/git-bug/util/timestamp"
@@ -15,6 +16,7 @@ import (
)
var _ Operation = &EditCommentOperation{}
+var _ dag.OperationWithFiles = &EditCommentOperation{}
// EditCommentOperation will change a comment in the bug
type EditCommentOperation struct {
@@ -24,22 +26,15 @@ type EditCommentOperation struct {
Files []repository.Hash `json:"files"`
}
-// Sign-post method for gqlgen
-func (op *EditCommentOperation) IsOperation() {}
-
-func (op *EditCommentOperation) base() *OpBase {
- return &op.OpBase
-}
-
func (op *EditCommentOperation) Id() entity.Id {
- return idOperation(op)
+ return idOperation(op, &op.OpBase)
}
func (op *EditCommentOperation) Apply(snapshot *Snapshot) {
// Todo: currently any message can be edited, even by a different author
// crypto signature are needed.
- snapshot.addActor(op.Author)
+ snapshot.addActor(op.Author_)
var target TimelineItem
@@ -85,7 +80,7 @@ func (op *EditCommentOperation) GetFiles() []repository.Hash {
}
func (op *EditCommentOperation) Validate() error {
- if err := opBaseValidate(op, EditCommentOp); err != nil {
+ if err := op.OpBase.Validate(op, EditCommentOp); err != nil {
return err
}
@@ -100,7 +95,7 @@ func (op *EditCommentOperation) Validate() error {
return nil
}
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
// This workaround is necessary to avoid the inner OpBase.MarshalJSON
// overriding the outer op's MarshalJSON
func (op *EditCommentOperation) UnmarshalJSON(data []byte) error {
diff --git a/bug/op_edit_comment_test.go b/bug/op_edit_comment_test.go
index 583ba656..777f5f87 100644
--- a/bug/op_edit_comment_test.go
+++ b/bug/op_edit_comment_test.go
@@ -5,7 +5,6 @@ import (
"testing"
"time"
- "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/MichaelMure/git-bug/identity"
@@ -15,9 +14,9 @@ import (
func TestEdit(t *testing.T) {
snapshot := Snapshot{}
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
@@ -44,62 +43,62 @@ func TestEdit(t *testing.T) {
id3 := comment2.Id()
require.NoError(t, id3.Validate())
- edit := NewEditCommentOp(rene, unix, id1, "create edited", nil)
+ edit := NewEditCommentOp(rene, unix, snapshot.Comments[0].Id(), "create edited", nil)
edit.Apply(&snapshot)
- assert.Equal(t, len(snapshot.Timeline), 4)
- assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2)
- assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 1)
- assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1)
- assert.Equal(t, snapshot.Comments[0].Message, "create edited")
- assert.Equal(t, snapshot.Comments[1].Message, "comment 1")
- assert.Equal(t, snapshot.Comments[2].Message, "comment 2")
+ require.Len(t, snapshot.Timeline, 4)
+ require.Len(t, snapshot.Timeline[0].(*CreateTimelineItem).History, 2)
+ require.Len(t, snapshot.Timeline[1].(*AddCommentTimelineItem).History, 1)
+ require.Len(t, snapshot.Timeline[3].(*AddCommentTimelineItem).History, 1)
+ require.Equal(t, snapshot.Comments[0].Message, "create edited")
+ require.Equal(t, snapshot.Comments[1].Message, "comment 1")
+ require.Equal(t, snapshot.Comments[2].Message, "comment 2")
- edit2 := NewEditCommentOp(rene, unix, id2, "comment 1 edited", nil)
+ edit2 := NewEditCommentOp(rene, unix, snapshot.Comments[1].Id(), "comment 1 edited", nil)
edit2.Apply(&snapshot)
- assert.Equal(t, len(snapshot.Timeline), 4)
- assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2)
- assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2)
- assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1)
- assert.Equal(t, snapshot.Comments[0].Message, "create edited")
- assert.Equal(t, snapshot.Comments[1].Message, "comment 1 edited")
- assert.Equal(t, snapshot.Comments[2].Message, "comment 2")
+ require.Len(t, snapshot.Timeline, 4)
+ require.Len(t, snapshot.Timeline[0].(*CreateTimelineItem).History, 2)
+ require.Len(t, snapshot.Timeline[1].(*AddCommentTimelineItem).History, 2)
+ require.Len(t, snapshot.Timeline[3].(*AddCommentTimelineItem).History, 1)
+ require.Equal(t, snapshot.Comments[0].Message, "create edited")
+ require.Equal(t, snapshot.Comments[1].Message, "comment 1 edited")
+ require.Equal(t, snapshot.Comments[2].Message, "comment 2")
- edit3 := NewEditCommentOp(rene, unix, id3, "comment 2 edited", nil)
+ edit3 := NewEditCommentOp(rene, unix, snapshot.Comments[2].Id(), "comment 2 edited", nil)
edit3.Apply(&snapshot)
- assert.Equal(t, len(snapshot.Timeline), 4)
- assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2)
- assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2)
- assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 2)
- assert.Equal(t, snapshot.Comments[0].Message, "create edited")
- assert.Equal(t, snapshot.Comments[1].Message, "comment 1 edited")
- assert.Equal(t, snapshot.Comments[2].Message, "comment 2 edited")
+ require.Len(t, snapshot.Timeline, 4)
+ require.Len(t, snapshot.Timeline[0].(*CreateTimelineItem).History, 2)
+ require.Len(t, snapshot.Timeline[1].(*AddCommentTimelineItem).History, 2)
+ require.Len(t, snapshot.Timeline[3].(*AddCommentTimelineItem).History, 2)
+ require.Equal(t, snapshot.Comments[0].Message, "create edited")
+ require.Equal(t, snapshot.Comments[1].Message, "comment 1 edited")
+ require.Equal(t, snapshot.Comments[2].Message, "comment 2 edited")
}
func TestEditCommentSerialize(t *testing.T) {
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
before := NewEditCommentOp(rene, unix, "target", "message", nil)
data, err := json.Marshal(before)
- assert.NoError(t, err)
+ require.NoError(t, err)
var after EditCommentOperation
err = json.Unmarshal(data, &after)
- assert.NoError(t, err)
+ require.NoError(t, err)
// enforce creating the ID
before.Id()
// Replace the identity stub with the real thing
- assert.Equal(t, rene.Id(), after.base().Author.Id())
- after.Author = rene
+ require.Equal(t, rene.Id(), after.Author().Id())
+ after.Author_ = rene
- assert.Equal(t, before, &after)
+ require.Equal(t, before, &after)
}
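
The edits above now target snapshot.Comments[i].Id() because comment identifiers are no longer raw operation ids: TestCreate builds one with entity.CombineIds(create.Id(), create.Id()), i.e. the bug's create-operation id combined with the comment operation's own id. A rough, hypothetical sketch of combining two ids by hashing their concatenation (the real scheme lives in the entity package and may differ):

    package main

    import (
        "crypto/sha256"
        "fmt"
    )

    // combineIds is a hypothetical stand-in for entity.CombineIds: it derives
    // one identifier from two existing ones by hashing their concatenation.
    func combineIds(a, b string) string {
        sum := sha256.Sum256([]byte(a + b))
        return fmt.Sprintf("%x", sum)
    }

    func main() {
        createId := "1234abcd"
        // the first comment of a bug combines the create operation id with itself
        fmt.Println(combineIds(createId, createId))
    }
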
diff --git a/bug/op_label_change.go b/bug/op_label_change.go
index fefe2402..8b0e5ec8 100644
--- a/bug/op_label_change.go
+++ b/bug/op_label_change.go
@@ -21,20 +21,13 @@ type LabelChangeOperation struct {
Removed []Label `json:"removed"`
}
-// Sign-post method for gqlgen
-func (op *LabelChangeOperation) IsOperation() {}
-
-func (op *LabelChangeOperation) base() *OpBase {
- return &op.OpBase
-}
-
func (op *LabelChangeOperation) Id() entity.Id {
- return idOperation(op)
+ return idOperation(op, &op.OpBase)
}
// Apply applies the operation
func (op *LabelChangeOperation) Apply(snapshot *Snapshot) {
- snapshot.addActor(op.Author)
+ snapshot.addActor(op.Author_)
// Add in the set
AddLoop:
@@ -66,7 +59,7 @@ AddLoop:
item := &LabelChangeTimelineItem{
id: op.Id(),
- Author: op.Author,
+ Author: op.Author_,
UnixTime: timestamp.Timestamp(op.UnixTime),
Added: op.Added,
Removed: op.Removed,
@@ -76,7 +69,7 @@ AddLoop:
}
func (op *LabelChangeOperation) Validate() error {
- if err := opBaseValidate(op, LabelChangeOp); err != nil {
+ if err := op.OpBase.Validate(op, LabelChangeOp); err != nil {
return err
}
@@ -99,7 +92,7 @@ func (op *LabelChangeOperation) Validate() error {
return nil
}
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
// This workaround is necessary to avoid the inner OpBase.MarshalJSON
// overriding the outer op's MarshalJSON
func (op *LabelChangeOperation) UnmarshalJSON(data []byte) error {
diff --git a/bug/op_label_change_test.go b/bug/op_label_change_test.go
index c98b2207..40dc4f0d 100644
--- a/bug/op_label_change_test.go
+++ b/bug/op_label_change_test.go
@@ -9,32 +9,30 @@ import (
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
-
- "github.com/stretchr/testify/assert"
)
func TestLabelChangeSerialize(t *testing.T) {
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
before := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"})
data, err := json.Marshal(before)
- assert.NoError(t, err)
+ require.NoError(t, err)
var after LabelChangeOperation
err = json.Unmarshal(data, &after)
- assert.NoError(t, err)
+ require.NoError(t, err)
// enforce creating the ID
before.Id()
// Replace the identity stub with the real thing
- assert.Equal(t, rene.Id(), after.base().Author.Id())
- after.Author = rene
+ require.Equal(t, rene.Id(), after.Author().Id())
+ after.Author_ = rene
- assert.Equal(t, before, &after)
+ require.Equal(t, before, &after)
}
diff --git a/bug/op_noop.go b/bug/op_noop.go
index 6364f918..1b11e694 100644
--- a/bug/op_noop.go
+++ b/bug/op_noop.go
@@ -16,15 +16,8 @@ type NoOpOperation struct {
OpBase
}
-// Sign-post method for gqlgen
-func (op *NoOpOperation) IsOperation() {}
-
-func (op *NoOpOperation) base() *OpBase {
- return &op.OpBase
-}
-
func (op *NoOpOperation) Id() entity.Id {
- return idOperation(op)
+ return idOperation(op, &op.OpBase)
}
func (op *NoOpOperation) Apply(snapshot *Snapshot) {
@@ -32,10 +25,10 @@ func (op *NoOpOperation) Apply(snapshot *Snapshot) {
}
func (op *NoOpOperation) Validate() error {
- return opBaseValidate(op, NoOpOp)
+ return op.OpBase.Validate(op, NoOpOp)
}
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
// This workaround is necessary to avoid the inner OpBase.MarshalJSON
// overriding the outer op's MarshalJSON
func (op *NoOpOperation) UnmarshalJSON(data []byte) error {
diff --git a/bug/op_noop_test.go b/bug/op_noop_test.go
index 0e34c961..0e3727c2 100644
--- a/bug/op_noop_test.go
+++ b/bug/op_noop_test.go
@@ -14,9 +14,9 @@ import (
)
func TestNoopSerialize(t *testing.T) {
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
@@ -33,8 +33,8 @@ func TestNoopSerialize(t *testing.T) {
before.Id()
// Replace the identity stub with the real thing
- assert.Equal(t, rene.Id(), after.base().Author.Id())
- after.Author = rene
+ assert.Equal(t, rene.Id(), after.Author().Id())
+ after.Author_ = rene
assert.Equal(t, before, &after)
}
diff --git a/bug/op_set_metadata.go b/bug/op_set_metadata.go
index 23d11461..ca19a838 100644
--- a/bug/op_set_metadata.go
+++ b/bug/op_set_metadata.go
@@ -17,41 +17,25 @@ type SetMetadataOperation struct {
NewMetadata map[string]string `json:"new_metadata"`
}
-// Sign-post method for gqlgen
-func (op *SetMetadataOperation) IsOperation() {}
-
-func (op *SetMetadataOperation) base() *OpBase {
- return &op.OpBase
-}
-
func (op *SetMetadataOperation) Id() entity.Id {
- return idOperation(op)
+ return idOperation(op, &op.OpBase)
}
func (op *SetMetadataOperation) Apply(snapshot *Snapshot) {
for _, target := range snapshot.Operations {
if target.Id() == op.Target {
- base := target.base()
-
- if base.extraMetadata == nil {
- base.extraMetadata = make(map[string]string)
- }
-
// Apply the metadata in an immutable way: if a metadata entry already
// exists, it's not possible to override it.
- for key, val := range op.NewMetadata {
- if _, exist := base.extraMetadata[key]; !exist {
- base.extraMetadata[key] = val
- }
+ for key, value := range op.NewMetadata {
+ target.setExtraMetadataImmutable(key, value)
}
-
return
}
}
}
func (op *SetMetadataOperation) Validate() error {
- if err := opBaseValidate(op, SetMetadataOp); err != nil {
+ if err := op.OpBase.Validate(op, SetMetadataOp); err != nil {
return err
}
@@ -62,7 +46,7 @@ func (op *SetMetadataOperation) Validate() error {
return nil
}
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
// This workaround is necessary to avoid the inner OpBase.MarshalJSON
// overriding the outer op's MarshalJSON
func (op *SetMetadataOperation) UnmarshalJSON(data []byte) error {
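
SetMetadataOperation.Apply now delegates to a setExtraMetadataImmutable helper on the target operation (defined further down in operation.go): a key is only written if it is not present yet, so metadata recorded by the original operation can never be overridden. A minimal sketch of that set-if-absent rule:

    package main

    import "fmt"

    // setIfAbsent mirrors the immutable-metadata rule: an existing key wins.
    func setIfAbsent(m map[string]string, key, value string) {
        if _, exist := m[key]; !exist {
            m[key] = value
        }
    }

    func main() {
        meta := map[string]string{"key": "value"}
        setIfAbsent(meta, "key", "override") // ignored, "key" is already set
        setIfAbsent(meta, "key2", "value2")  // added
        fmt.Println(meta) // map[key:value key2:value2]
    }
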
diff --git a/bug/op_set_metadata_test.go b/bug/op_set_metadata_test.go
index d7711249..78f7d883 100644
--- a/bug/op_set_metadata_test.go
+++ b/bug/op_set_metadata_test.go
@@ -8,16 +8,15 @@ import (
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
- "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestSetMetadata(t *testing.T) {
snapshot := Snapshot{}
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
@@ -47,15 +46,15 @@ func TestSetMetadata(t *testing.T) {
snapshot.Operations = append(snapshot.Operations, op1)
createMetadata := snapshot.Operations[0].AllMetadata()
- assert.Equal(t, len(createMetadata), 2)
+ require.Len(t, createMetadata, 2)
// original key is not overridden
- assert.Equal(t, createMetadata["key"], "value")
+ require.Equal(t, createMetadata["key"], "value")
// new key is set
- assert.Equal(t, createMetadata["key2"], "value")
+ require.Equal(t, createMetadata["key2"], "value")
commentMetadata := snapshot.Operations[1].AllMetadata()
- assert.Equal(t, len(commentMetadata), 1)
- assert.Equal(t, commentMetadata["key2"], "value2")
+ require.Len(t, commentMetadata, 1)
+ require.Equal(t, commentMetadata["key2"], "value2")
op2 := NewSetMetadataOp(rene, unix, id2, map[string]string{
"key2": "value",
@@ -66,16 +65,16 @@ func TestSetMetadata(t *testing.T) {
snapshot.Operations = append(snapshot.Operations, op2)
createMetadata = snapshot.Operations[0].AllMetadata()
- assert.Equal(t, len(createMetadata), 2)
- assert.Equal(t, createMetadata["key"], "value")
- assert.Equal(t, createMetadata["key2"], "value")
+ require.Len(t, createMetadata, 2)
+ require.Equal(t, createMetadata["key"], "value")
+ require.Equal(t, createMetadata["key2"], "value")
commentMetadata = snapshot.Operations[1].AllMetadata()
- assert.Equal(t, len(commentMetadata), 2)
+ require.Len(t, commentMetadata, 2)
// original key is not overridden
- assert.Equal(t, commentMetadata["key2"], "value2")
+ require.Equal(t, commentMetadata["key2"], "value2")
// new key is set
- assert.Equal(t, commentMetadata["key3"], "value3")
+ require.Equal(t, commentMetadata["key3"], "value3")
op3 := NewSetMetadataOp(rene, unix, id1, map[string]string{
"key": "override",
@@ -86,22 +85,22 @@ func TestSetMetadata(t *testing.T) {
snapshot.Operations = append(snapshot.Operations, op3)
createMetadata = snapshot.Operations[0].AllMetadata()
- assert.Equal(t, len(createMetadata), 2)
+ require.Len(t, createMetadata, 2)
// original key is not overridden
- assert.Equal(t, createMetadata["key"], "value")
+ require.Equal(t, createMetadata["key"], "value")
// previously set key is not overridden
- assert.Equal(t, createMetadata["key2"], "value")
+ require.Equal(t, createMetadata["key2"], "value")
commentMetadata = snapshot.Operations[1].AllMetadata()
- assert.Equal(t, len(commentMetadata), 2)
- assert.Equal(t, commentMetadata["key2"], "value2")
- assert.Equal(t, commentMetadata["key3"], "value3")
+ require.Len(t, commentMetadata, 2)
+ require.Equal(t, commentMetadata["key2"], "value2")
+ require.Equal(t, commentMetadata["key3"], "value3")
}
func TestSetMetadataSerialize(t *testing.T) {
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
@@ -111,18 +110,18 @@ func TestSetMetadataSerialize(t *testing.T) {
})
data, err := json.Marshal(before)
- assert.NoError(t, err)
+ require.NoError(t, err)
var after SetMetadataOperation
err = json.Unmarshal(data, &after)
- assert.NoError(t, err)
+ require.NoError(t, err)
// enforce creating the ID
before.Id()
// Replace the identity stub with the real thing
- assert.Equal(t, rene.Id(), after.base().Author.Id())
- after.Author = rene
+ require.Equal(t, rene.Id(), after.Author().Id())
+ after.Author_ = rene
- assert.Equal(t, before, &after)
+ require.Equal(t, before, &after)
}
diff --git a/bug/op_set_status.go b/bug/op_set_status.go
index eb2c0ba4..e22ded54 100644
--- a/bug/op_set_status.go
+++ b/bug/op_set_status.go
@@ -18,24 +18,17 @@ type SetStatusOperation struct {
Status Status `json:"status"`
}
-// Sign-post method for gqlgen
-func (op *SetStatusOperation) IsOperation() {}
-
-func (op *SetStatusOperation) base() *OpBase {
- return &op.OpBase
-}
-
func (op *SetStatusOperation) Id() entity.Id {
- return idOperation(op)
+ return idOperation(op, &op.OpBase)
}
func (op *SetStatusOperation) Apply(snapshot *Snapshot) {
snapshot.Status = op.Status
- snapshot.addActor(op.Author)
+ snapshot.addActor(op.Author_)
item := &SetStatusTimelineItem{
id: op.Id(),
- Author: op.Author,
+ Author: op.Author_,
UnixTime: timestamp.Timestamp(op.UnixTime),
Status: op.Status,
}
@@ -44,7 +37,7 @@ func (op *SetStatusOperation) Apply(snapshot *Snapshot) {
}
func (op *SetStatusOperation) Validate() error {
- if err := opBaseValidate(op, SetStatusOp); err != nil {
+ if err := op.OpBase.Validate(op, SetStatusOp); err != nil {
return err
}
@@ -55,7 +48,7 @@ func (op *SetStatusOperation) Validate() error {
return nil
}
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
// This workaround is necessary to avoid the inner OpBase.MarshalJSON
// overriding the outer op's MarshalJSON
func (op *SetStatusOperation) UnmarshalJSON(data []byte) error {
diff --git a/bug/op_set_status_test.go b/bug/op_set_status_test.go
index cdea2dd2..83ff22ae 100644
--- a/bug/op_set_status_test.go
+++ b/bug/op_set_status_test.go
@@ -9,32 +9,30 @@ import (
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
-
- "github.com/stretchr/testify/assert"
)
func TestSetStatusSerialize(t *testing.T) {
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
before := NewSetStatusOp(rene, unix, ClosedStatus)
data, err := json.Marshal(before)
- assert.NoError(t, err)
+ require.NoError(t, err)
var after SetStatusOperation
err = json.Unmarshal(data, &after)
- assert.NoError(t, err)
+ require.NoError(t, err)
// enforce creating the ID
before.Id()
// Replace the identity stub with the real thing
- assert.Equal(t, rene.Id(), after.base().Author.Id())
- after.Author = rene
+ require.Equal(t, rene.Id(), after.Author().Id())
+ after.Author_ = rene
- assert.Equal(t, before, &after)
+ require.Equal(t, before, &after)
}
diff --git a/bug/op_set_title.go b/bug/op_set_title.go
index ddd98f0e..c6a26746 100644
--- a/bug/op_set_title.go
+++ b/bug/op_set_title.go
@@ -21,24 +21,17 @@ type SetTitleOperation struct {
Was string `json:"was"`
}
-// Sign-post method for gqlgen
-func (op *SetTitleOperation) IsOperation() {}
-
-func (op *SetTitleOperation) base() *OpBase {
- return &op.OpBase
-}
-
func (op *SetTitleOperation) Id() entity.Id {
- return idOperation(op)
+ return idOperation(op, &op.OpBase)
}
func (op *SetTitleOperation) Apply(snapshot *Snapshot) {
snapshot.Title = op.Title
- snapshot.addActor(op.Author)
+ snapshot.addActor(op.Author_)
item := &SetTitleTimelineItem{
id: op.Id(),
- Author: op.Author,
+ Author: op.Author_,
UnixTime: timestamp.Timestamp(op.UnixTime),
Title: op.Title,
Was: op.Was,
@@ -48,7 +41,7 @@ func (op *SetTitleOperation) Apply(snapshot *Snapshot) {
}
func (op *SetTitleOperation) Validate() error {
- if err := opBaseValidate(op, SetTitleOp); err != nil {
+ if err := op.OpBase.Validate(op, SetTitleOp); err != nil {
return err
}
@@ -75,7 +68,7 @@ func (op *SetTitleOperation) Validate() error {
return nil
}
-// UnmarshalJSON is a two step JSON unmarshaling
+// UnmarshalJSON is a two step JSON unmarshalling
// This workaround is necessary to avoid the inner OpBase.MarshalJSON
// overriding the outer op's MarshalJSON
func (op *SetTitleOperation) UnmarshalJSON(data []byte) error {
@@ -132,19 +125,17 @@ func (s *SetTitleTimelineItem) IsAuthored() {}
// Convenience function to apply the operation
func SetTitle(b Interface, author identity.Interface, unixTime int64, title string) (*SetTitleOperation, error) {
- it := NewOperationIterator(b)
-
- var lastTitleOp Operation
- for it.Next() {
- op := it.Value()
- if op.base().OperationType == SetTitleOp {
+ var lastTitleOp *SetTitleOperation
+ for _, op := range b.Operations() {
+ switch op := op.(type) {
+ case *SetTitleOperation:
lastTitleOp = op
}
}
var was string
if lastTitleOp != nil {
- was = lastTitleOp.(*SetTitleOperation).Title
+ was = lastTitleOp.Title
} else {
was = b.FirstOp().(*CreateOperation).Title
}
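
With the operation iterator removed, SetTitle above walks b.Operations() directly and remembers the last *SetTitleOperation it encounters through a type switch. The same pattern in isolation, on toy types:

    package main

    import "fmt"

    type operation interface{ apply() }

    type setTitle struct{ title string }
    type addComment struct{ msg string }

    func (s *setTitle) apply()   {}
    func (a *addComment) apply() {}

    func main() {
        ops := []operation{
            &setTitle{title: "first"},
            &addComment{msg: "hello"},
            &setTitle{title: "second"},
        }

        // keep only the last *setTitle, as SetTitle does for *SetTitleOperation
        var last *setTitle
        for _, op := range ops {
            switch op := op.(type) {
            case *setTitle:
                last = op
            }
        }
        fmt.Println(last.title) // second
    }
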
diff --git a/bug/op_set_title_test.go b/bug/op_set_title_test.go
index 368ada61..7059c4c7 100644
--- a/bug/op_set_title_test.go
+++ b/bug/op_set_title_test.go
@@ -9,32 +9,30 @@ import (
"github.com/MichaelMure/git-bug/identity"
"github.com/MichaelMure/git-bug/repository"
-
- "github.com/stretchr/testify/assert"
)
func TestSetTitleSerialize(t *testing.T) {
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ repo := repository.NewMockRepo()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
require.NoError(t, err)
unix := time.Now().Unix()
before := NewSetTitleOp(rene, unix, "title", "was")
data, err := json.Marshal(before)
- assert.NoError(t, err)
+ require.NoError(t, err)
var after SetTitleOperation
err = json.Unmarshal(data, &after)
- assert.NoError(t, err)
+ require.NoError(t, err)
// enforce creating the ID
before.Id()
// Replace the identity stub with the real thing
- assert.Equal(t, rene.Id(), after.base().Author.Id())
- after.Author = rene
+ require.Equal(t, rene.Id(), after.Author().Id())
+ after.Author_ = rene
- assert.Equal(t, before, &after)
+ require.Equal(t, before, &after)
}
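
Every *Serialize test in this diff ends with the same two steps: compare only the author ids, then swap the deserialized author for the real identity before the final deep-equality check. The JSON form stores the author by reference, so unmarshalling can only rebuild a stub carrying the id. A reduced sketch of why the swap is needed, using hypothetical local types:

    package main

    import (
        "fmt"
        "reflect"
    )

    type identity interface{ Id() string }

    type fullIdentity struct{ id, name string }

    func (f *fullIdentity) Id() string { return f.id }

    // stub is all that can be rebuilt from the serialized form: the id alone.
    type stub struct{ id string }

    func (s *stub) Id() string { return s.id }

    type op struct {
        Author identity
        Title  string
    }

    func main() {
        rene := &fullIdentity{id: "abc123", name: "René Descartes"}
        before := op{Author: rene, Title: "title"}
        after := op{Author: &stub{id: "abc123"}, Title: "title"}

        // a plain deep-equal fails: *stub is not *fullIdentity
        fmt.Println(reflect.DeepEqual(before, after)) // false

        // so the tests compare ids, then replace the stub with the real thing
        fmt.Println(before.Author.Id() == after.Author.Id()) // true
        after.Author = rene
        fmt.Println(reflect.DeepEqual(before, after)) // true
    }
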
diff --git a/bug/operation.go b/bug/operation.go
index 107c954e..8daa2cde 100644
--- a/bug/operation.go
+++ b/bug/operation.go
@@ -1,7 +1,7 @@
package bug
import (
- "crypto/sha256"
+ "crypto/rand"
"encoding/json"
"fmt"
"time"
@@ -9,8 +9,8 @@ import (
"github.com/pkg/errors"
"github.com/MichaelMure/git-bug/entity"
+ "github.com/MichaelMure/git-bug/entity/dag"
"github.com/MichaelMure/git-bug/identity"
- "github.com/MichaelMure/git-bug/repository"
)
// OperationType is an operation type identifier
@@ -30,39 +30,27 @@ const (
// Operation define the interface to fulfill for an edit operation of a Bug
type Operation interface {
- // base return the OpBase of the Operation, for package internal use
- base() *OpBase
- // Id return the identifier of the operation, to be used for back references
- Id() entity.Id
+ dag.Operation
+
+ // Type return the type of the operation
+ Type() OperationType
+
// Time return the time when the operation was added
Time() time.Time
- // GetFiles return the files needed by this operation
- GetFiles() []repository.Hash
// Apply the operation to a Snapshot to create the final state
Apply(snapshot *Snapshot)
- // Validate check if the operation is valid (ex: a title is a single line)
- Validate() error
+
// SetMetadata store arbitrary metadata about the operation
SetMetadata(key string, value string)
// GetMetadata retrieve arbitrary metadata about the operation
GetMetadata(key string) (string, bool)
// AllMetadata return all metadata for this operation
AllMetadata() map[string]string
- // GetAuthor return the author identity
- GetAuthor() identity.Interface
-
- // sign-post method for gqlgen
- IsOperation()
-}
-func deriveId(data []byte) entity.Id {
- sum := sha256.Sum256(data)
- return entity.Id(fmt.Sprintf("%x", sum))
+ setExtraMetadataImmutable(key string, value string)
}
-func idOperation(op Operation) entity.Id {
- base := op.base()
-
+func idOperation(op Operation, base *OpBase) entity.Id {
if base.id == "" {
// something went really wrong
panic("op's id not set")
@@ -78,18 +66,83 @@ func idOperation(op Operation) entity.Id {
panic(err)
}
- base.id = deriveId(data)
+ base.id = entity.DeriveId(data)
}
return base.id
}
+func operationUnmarshaller(author identity.Interface, raw json.RawMessage) (dag.Operation, error) {
+ var t struct {
+ OperationType OperationType `json:"type"`
+ }
+
+ if err := json.Unmarshal(raw, &t); err != nil {
+ return nil, err
+ }
+
+ var op Operation
+
+ switch t.OperationType {
+ case AddCommentOp:
+ op = &AddCommentOperation{}
+ case CreateOp:
+ op = &CreateOperation{}
+ case EditCommentOp:
+ op = &EditCommentOperation{}
+ case LabelChangeOp:
+ op = &LabelChangeOperation{}
+ case NoOpOp:
+ op = &NoOpOperation{}
+ case SetMetadataOp:
+ op = &SetMetadataOperation{}
+ case SetStatusOp:
+ op = &SetStatusOperation{}
+ case SetTitleOp:
+ op = &SetTitleOperation{}
+ default:
+ panic(fmt.Sprintf("unknown operation type %v", t.OperationType))
+ }
+
+ err := json.Unmarshal(raw, &op)
+ if err != nil {
+ return nil, err
+ }
+
+ switch op := op.(type) {
+ case *AddCommentOperation:
+ op.Author_ = author
+ case *CreateOperation:
+ op.Author_ = author
+ case *LabelChangeOperation:
+ op.Author_ = author
+ case *NoOpOperation:
+ op.Author_ = author
+ case *SetMetadataOperation:
+ op.Author_ = author
+ case *SetStatusOperation:
+ op.Author_ = author
+ case *SetTitleOperation:
+ op.Author_ = author
+ default:
+ panic(fmt.Sprintf("unknown operation type %T", op))
+ }
+
+ return op, nil
+}
+
// OpBase implement the common code for all operations
type OpBase struct {
OperationType OperationType `json:"type"`
- Author identity.Interface `json:"author"`
+ Author_ identity.Interface `json:"author"`
// TODO: part of the data model upgrade, this should eventually be a timestamp + lamport
UnixTime int64 `json:"timestamp"`
Metadata map[string]string `json:"metadata,omitempty"`
+
+ // mandatory random bytes to ensure sufficient entropy in the data later used to generate the ID
+ // len(Nonce) should be between 20 and 64 bytes
+ // It has no functional purpose and should be ignored.
+ Nonce []byte `json:"nonce"`
+
// Not serialized. Store the op's id in memory.
id entity.Id
// Not serialized. Store the extra metadata in memory,
@@ -101,21 +154,32 @@ type OpBase struct {
func newOpBase(opType OperationType, author identity.Interface, unixTime int64) OpBase {
return OpBase{
OperationType: opType,
- Author: author,
+ Author_: author,
UnixTime: unixTime,
+ Nonce: makeNonce(20),
id: entity.UnsetId,
}
}
-func (op *OpBase) UnmarshalJSON(data []byte) error {
+func makeNonce(len int) []byte {
+ result := make([]byte, len)
+ _, err := rand.Read(result)
+ if err != nil {
+ panic(err)
+ }
+ return result
+}
+
+func (base *OpBase) UnmarshalJSON(data []byte) error {
// Compute the Id when loading the op from disk.
- op.id = deriveId(data)
+ base.id = entity.DeriveId(data)
aux := struct {
OperationType OperationType `json:"type"`
Author json.RawMessage `json:"author"`
UnixTime int64 `json:"timestamp"`
Metadata map[string]string `json:"metadata,omitempty"`
+ Nonce []byte `json:"nonce"`
}{}
if err := json.Unmarshal(data, &aux); err != nil {
@@ -128,92 +192,110 @@ func (op *OpBase) UnmarshalJSON(data []byte) error {
return err
}
- op.OperationType = aux.OperationType
- op.Author = author
- op.UnixTime = aux.UnixTime
- op.Metadata = aux.Metadata
+ base.OperationType = aux.OperationType
+ base.Author_ = author
+ base.UnixTime = aux.UnixTime
+ base.Metadata = aux.Metadata
+ base.Nonce = aux.Nonce
return nil
}
-// Time return the time when the operation was added
-func (op *OpBase) Time() time.Time {
- return time.Unix(op.UnixTime, 0)
+func (base *OpBase) Type() OperationType {
+ return base.OperationType
}
-// GetFiles return the files needed by this operation
-func (op *OpBase) GetFiles() []repository.Hash {
- return nil
+// Time return the time when the operation was added
+func (base *OpBase) Time() time.Time {
+ return time.Unix(base.UnixTime, 0)
}
// Validate check the OpBase for errors
-func opBaseValidate(op Operation, opType OperationType) error {
- if op.base().OperationType != opType {
- return fmt.Errorf("incorrect operation type (expected: %v, actual: %v)", opType, op.base().OperationType)
+func (base *OpBase) Validate(op Operation, opType OperationType) error {
+ if base.OperationType != opType {
+ return fmt.Errorf("incorrect operation type (expected: %v, actual: %v)", opType, base.OperationType)
}
if op.Time().Unix() == 0 {
return fmt.Errorf("time not set")
}
- if op.base().Author == nil {
+ if base.Author_ == nil {
return fmt.Errorf("author not set")
}
- if err := op.base().Author.Validate(); err != nil {
+ if err := op.Author().Validate(); err != nil {
return errors.Wrap(err, "author")
}
- for _, hash := range op.GetFiles() {
- if !hash.IsValid() {
- return fmt.Errorf("file with invalid hash %v", hash)
+ if op, ok := op.(dag.OperationWithFiles); ok {
+ for _, hash := range op.GetFiles() {
+ if !hash.IsValid() {
+ return fmt.Errorf("file with invalid hash %v", hash)
+ }
}
}
+ if len(base.Nonce) > 64 {
+ return fmt.Errorf("nonce is too big")
+ }
+ if len(base.Nonce) < 20 {
+ return fmt.Errorf("nonce is too small")
+ }
+
return nil
}
// SetMetadata store arbitrary metadata about the operation
-func (op *OpBase) SetMetadata(key string, value string) {
- if op.Metadata == nil {
- op.Metadata = make(map[string]string)
+func (base *OpBase) SetMetadata(key string, value string) {
+ if base.Metadata == nil {
+ base.Metadata = make(map[string]string)
}
- op.Metadata[key] = value
- op.id = entity.UnsetId
+ base.Metadata[key] = value
+ base.id = entity.UnsetId
}
// GetMetadata retrieve arbitrary metadata about the operation
-func (op *OpBase) GetMetadata(key string) (string, bool) {
- val, ok := op.Metadata[key]
+func (base *OpBase) GetMetadata(key string) (string, bool) {
+ val, ok := base.Metadata[key]
if ok {
return val, true
}
// extraMetadata can't replace the original operations value if any
- val, ok = op.extraMetadata[key]
+ val, ok = base.extraMetadata[key]
return val, ok
}
// AllMetadata return all metadata for this operation
-func (op *OpBase) AllMetadata() map[string]string {
+func (base *OpBase) AllMetadata() map[string]string {
result := make(map[string]string)
- for key, val := range op.extraMetadata {
+ for key, val := range base.extraMetadata {
result[key] = val
}
// Original metadata take precedence
- for key, val := range op.Metadata {
+ for key, val := range base.Metadata {
result[key] = val
}
return result
}
-// GetAuthor return author identity
-func (op *OpBase) GetAuthor() identity.Interface {
- return op.Author
+func (base *OpBase) setExtraMetadataImmutable(key string, value string) {
+ if base.extraMetadata == nil {
+ base.extraMetadata = make(map[string]string)
+ }
+ if _, exist := base.extraMetadata[key]; !exist {
+ base.extraMetadata[key] = value
+ }
+}
+
+// Author return author identity
+func (base *OpBase) Author() identity.Interface {
+ return base.Author_
}
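
Two additions to OpBase above deserve a closer look: every new operation carries a random Nonce (makeNonce(20), validated to stay between 20 and 64 bytes), and the operation id is derived by hashing the operation's serialized JSON (entity.DeriveId, replacing the local deriveId that hashed with SHA-256). The nonce guarantees that two otherwise identical operations still serialize to different bytes and therefore get different ids. A standalone sketch of that scheme, assuming a hex-encoded SHA-256 like the removed deriveId:

    package main

    import (
        "crypto/rand"
        "crypto/sha256"
        "encoding/json"
        "fmt"
    )

    type opBase struct {
        Title string `json:"title"`
        // the nonce carries no meaning; it only makes otherwise identical
        // operations serialize differently, hence produce different ids
        Nonce []byte `json:"nonce"`
    }

    func makeNonce(n int) []byte {
        b := make([]byte, n)
        if _, err := rand.Read(b); err != nil {
            panic(err)
        }
        return b
    }

    func deriveId(data []byte) string {
        sum := sha256.Sum256(data)
        return fmt.Sprintf("%x", sum)
    }

    func main() {
        a := opBase{Title: "title", Nonce: makeNonce(20)}
        b := opBase{Title: "title", Nonce: makeNonce(20)}

        da, _ := json.Marshal(a)
        db, _ := json.Marshal(b)

        // same content, different nonces, therefore different ids
        fmt.Println(deriveId(da) != deriveId(db)) // true
    }
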
diff --git a/bug/operation_iterator.go b/bug/operation_iterator.go
deleted file mode 100644
index f42b1776..00000000
--- a/bug/operation_iterator.go
+++ /dev/null
@@ -1,72 +0,0 @@
-package bug
-
-type OperationIterator struct {
- bug *Bug
- packIndex int
- opIndex int
-}
-
-func NewOperationIterator(bug Interface) *OperationIterator {
- return &OperationIterator{
- bug: bugFromInterface(bug),
- packIndex: 0,
- opIndex: -1,
- }
-}
-
-func (it *OperationIterator) Next() bool {
- // Special case of the staging area
- if it.packIndex == len(it.bug.packs) {
- pack := it.bug.staging
- it.opIndex++
- return it.opIndex < len(pack.Operations)
- }
-
- if it.packIndex >= len(it.bug.packs) {
- return false
- }
-
- pack := it.bug.packs[it.packIndex]
-
- it.opIndex++
-
- if it.opIndex < len(pack.Operations) {
- return true
- }
-
- // Note: this iterator doesn't handle the empty pack case
- it.opIndex = 0
- it.packIndex++
-
- // Special case of the non-empty staging area
- if it.packIndex == len(it.bug.packs) && len(it.bug.staging.Operations) > 0 {
- return true
- }
-
- return it.packIndex < len(it.bug.packs)
-}
-
-func (it *OperationIterator) Value() Operation {
- // Special case of the staging area
- if it.packIndex == len(it.bug.packs) {
- pack := it.bug.staging
-
- if it.opIndex >= len(pack.Operations) {
- panic("Iterator is not valid anymore")
- }
-
- return pack.Operations[it.opIndex]
- }
-
- if it.packIndex >= len(it.bug.packs) {
- panic("Iterator is not valid anymore")
- }
-
- pack := it.bug.packs[it.packIndex]
-
- if it.opIndex >= len(pack.Operations) {
- panic("Iterator is not valid anymore")
- }
-
- return pack.Operations[it.opIndex]
-}
diff --git a/bug/operation_iterator_test.go b/bug/operation_iterator_test.go
deleted file mode 100644
index 5d245185..00000000
--- a/bug/operation_iterator_test.go
+++ /dev/null
@@ -1,78 +0,0 @@
-package bug
-
-import (
- "fmt"
- "testing"
- "time"
-
- "github.com/stretchr/testify/require"
-
- "github.com/MichaelMure/git-bug/identity"
- "github.com/MichaelMure/git-bug/repository"
-)
-
-func ExampleOperationIterator() {
- b := NewBug()
-
- // add operations
-
- it := NewOperationIterator(b)
-
- for it.Next() {
- // do something with each operations
- _ = it.Value()
- }
-}
-
-func TestOpIterator(t *testing.T) {
- mockRepo := repository.NewMockRepoForTest()
-
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(mockRepo)
- require.NoError(t, err)
-
- unix := time.Now().Unix()
-
- createOp := NewCreateOp(rene, unix, "title", "message", nil)
- addCommentOp := NewAddCommentOp(rene, unix, "message2", nil)
- setStatusOp := NewSetStatusOp(rene, unix, ClosedStatus)
- labelChangeOp := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"})
-
- var i int
- genTitleOp := func() Operation {
- i++
- return NewSetTitleOp(rene, unix, fmt.Sprintf("title%d", i), "")
- }
-
- bug1 := NewBug()
-
- // first pack
- bug1.Append(createOp)
- bug1.Append(addCommentOp)
- bug1.Append(setStatusOp)
- bug1.Append(labelChangeOp)
- err = bug1.Commit(mockRepo)
- require.NoError(t, err)
-
- // second pack
- bug1.Append(genTitleOp())
- bug1.Append(genTitleOp())
- bug1.Append(genTitleOp())
- err = bug1.Commit(mockRepo)
- require.NoError(t, err)
-
- // staging
- bug1.Append(genTitleOp())
- bug1.Append(genTitleOp())
- bug1.Append(genTitleOp())
-
- it := NewOperationIterator(bug1)
-
- counter := 0
- for it.Next() {
- _ = it.Value()
- counter++
- }
-
- require.Equal(t, 10, counter)
-}
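
The two files deleted above implemented and tested a two-level traversal: committed operation packs first, then the in-memory staging pack. The DAG entity framework exposes a flat Operations() slice instead, which is conceptually just the flattened result of that traversal. A sketch of the equivalence on simplified types:

    package main

    import "fmt"

    type operation string

    type operationPack struct{ Operations []operation }

    type bug struct {
        packs   []operationPack // committed
        staging operationPack   // not yet committed
    }

    // operations flattens the committed packs followed by the staging area,
    // the same order the removed iterator produced.
    func (b *bug) operations() []operation {
        var out []operation
        for _, p := range b.packs {
            out = append(out, p.Operations...)
        }
        return append(out, b.staging.Operations...)
    }

    func main() {
        b := &bug{
            packs: []operationPack{
                {Operations: []operation{"create", "add-comment"}},
                {Operations: []operation{"set-title"}},
            },
            staging: operationPack{Operations: []operation{"set-status"}},
        }
        fmt.Println(len(b.operations())) // 4
    }
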
diff --git a/bug/operation_pack.go b/bug/operation_pack.go
deleted file mode 100644
index 1a8ef0db..00000000
--- a/bug/operation_pack.go
+++ /dev/null
@@ -1,188 +0,0 @@
-package bug
-
-import (
- "encoding/json"
- "fmt"
-
- "github.com/pkg/errors"
-
- "github.com/MichaelMure/git-bug/entity"
- "github.com/MichaelMure/git-bug/repository"
-)
-
-// 1: original format
-// 2: no more legacy identities
-const formatVersion = 2
-
-// OperationPack represent an ordered set of operation to apply
-// to a Bug. These operations are stored in a single Git commit.
-//
-// These commits will be linked together in a linear chain of commits
-// inside Git to form the complete ordered chain of operation to
-// apply to get the final state of the Bug
-type OperationPack struct {
- Operations []Operation
-
- // Private field so not serialized
- commitHash repository.Hash
-}
-
-func (opp *OperationPack) MarshalJSON() ([]byte, error) {
- return json.Marshal(struct {
- Version uint `json:"version"`
- Operations []Operation `json:"ops"`
- }{
- Version: formatVersion,
- Operations: opp.Operations,
- })
-}
-
-func (opp *OperationPack) UnmarshalJSON(data []byte) error {
- aux := struct {
- Version uint `json:"version"`
- Operations []json.RawMessage `json:"ops"`
- }{}
-
- if err := json.Unmarshal(data, &aux); err != nil {
- return err
- }
-
- if aux.Version < formatVersion {
- return entity.NewErrOldFormatVersion(aux.Version)
- }
- if aux.Version > formatVersion {
- return entity.NewErrNewFormatVersion(aux.Version)
- }
-
- for _, raw := range aux.Operations {
- var t struct {
- OperationType OperationType `json:"type"`
- }
-
- if err := json.Unmarshal(raw, &t); err != nil {
- return err
- }
-
- // delegate to specialized unmarshal function
- op, err := opp.unmarshalOp(raw, t.OperationType)
- if err != nil {
- return err
- }
-
- opp.Operations = append(opp.Operations, op)
- }
-
- return nil
-}
-
-func (opp *OperationPack) unmarshalOp(raw []byte, _type OperationType) (Operation, error) {
- switch _type {
- case AddCommentOp:
- op := &AddCommentOperation{}
- err := json.Unmarshal(raw, &op)
- return op, err
- case CreateOp:
- op := &CreateOperation{}
- err := json.Unmarshal(raw, &op)
- return op, err
- case EditCommentOp:
- op := &EditCommentOperation{}
- err := json.Unmarshal(raw, &op)
- return op, err
- case LabelChangeOp:
- op := &LabelChangeOperation{}
- err := json.Unmarshal(raw, &op)
- return op, err
- case NoOpOp:
- op := &NoOpOperation{}
- err := json.Unmarshal(raw, &op)
- return op, err
- case SetMetadataOp:
- op := &SetMetadataOperation{}
- err := json.Unmarshal(raw, &op)
- return op, err
- case SetStatusOp:
- op := &SetStatusOperation{}
- err := json.Unmarshal(raw, &op)
- return op, err
- case SetTitleOp:
- op := &SetTitleOperation{}
- err := json.Unmarshal(raw, &op)
- return op, err
- default:
- return nil, fmt.Errorf("unknown operation type %v", _type)
- }
-}
-
-// Append a new operation to the pack
-func (opp *OperationPack) Append(op Operation) {
- opp.Operations = append(opp.Operations, op)
-}
-
-// IsEmpty tell if the OperationPack is empty
-func (opp *OperationPack) IsEmpty() bool {
- return len(opp.Operations) == 0
-}
-
-// IsValid tell if the OperationPack is considered valid
-func (opp *OperationPack) Validate() error {
- if opp.IsEmpty() {
- return fmt.Errorf("empty")
- }
-
- for _, op := range opp.Operations {
- if err := op.Validate(); err != nil {
- return errors.Wrap(err, "op")
- }
- }
-
- return nil
-}
-
-// Write will serialize and store the OperationPack as a git blob and return
-// its hash
-func (opp *OperationPack) Write(repo repository.ClockedRepo) (repository.Hash, error) {
- // make sure we don't write invalid data
- err := opp.Validate()
- if err != nil {
- return "", errors.Wrap(err, "validation error")
- }
-
- // First, make sure that all the identities are properly committed as well
- // TODO: this might be downgraded to "make sure it exists in git" but then, what makes
- // sure no data is lost on identities?
- for _, op := range opp.Operations {
- if op.base().Author.NeedCommit() {
- return "", fmt.Errorf("identity need commmit")
- }
- }
-
- data, err := json.Marshal(opp)
-
- if err != nil {
- return "", err
- }
-
- hash, err := repo.StoreData(data)
-
- if err != nil {
- return "", err
- }
-
- return hash, nil
-}
-
-// Make a deep copy
-func (opp *OperationPack) Clone() OperationPack {
-
- clone := OperationPack{
- Operations: make([]Operation, len(opp.Operations)),
- commitHash: opp.commitHash,
- }
-
- for i, op := range opp.Operations {
- clone.Operations[i] = op
- }
-
- return clone
-}
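
The deleted OperationPack.UnmarshalJSON/unmarshalOp pair and the new operationUnmarshaller in operation.go both rely on the same two-step decoding trick: first unmarshal only the "type" discriminator, then unmarshal the full payload into the matching concrete struct. A minimal version of the pattern, with toy discriminator values rather than git-bug's real constants:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    const (
        createType    = 1 // toy values, not the real OperationType constants
        setStatusType = 2
    )

    type operation interface{ isOperation() }

    type createOp struct {
        Type  int    `json:"type"`
        Title string `json:"title"`
    }

    type setStatusOp struct {
        Type   int `json:"type"`
        Status int `json:"status"`
    }

    func (*createOp) isOperation()    {}
    func (*setStatusOp) isOperation() {}

    // unmarshalOp peeks at the "type" field, then decodes the full payload
    // into the matching concrete struct.
    func unmarshalOp(raw json.RawMessage) (operation, error) {
        var t struct {
            Type int `json:"type"`
        }
        if err := json.Unmarshal(raw, &t); err != nil {
            return nil, err
        }

        var op operation
        switch t.Type {
        case createType:
            op = &createOp{}
        case setStatusType:
            op = &setStatusOp{}
        default:
            return nil, fmt.Errorf("unknown operation type %v", t.Type)
        }
        if err := json.Unmarshal(raw, op); err != nil {
            return nil, err
        }
        return op, nil
    }

    func main() {
        op, err := unmarshalOp([]byte(`{"type":1,"title":"title"}`))
        fmt.Printf("%#v %v\n", op, err)
    }
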
diff --git a/bug/operation_pack_test.go b/bug/operation_pack_test.go
deleted file mode 100644
index 6aab0097..00000000
--- a/bug/operation_pack_test.go
+++ /dev/null
@@ -1,79 +0,0 @@
-package bug
-
-import (
- "encoding/json"
- "testing"
- "time"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-
- "github.com/MichaelMure/git-bug/identity"
- "github.com/MichaelMure/git-bug/repository"
-)
-
-func TestOperationPackSerialize(t *testing.T) {
- opp := &OperationPack{}
-
- repo := repository.NewMockRepoForTest()
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
- require.NoError(t, err)
-
- createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
- setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1")
- addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil)
- setStatusOp := NewSetStatusOp(rene, time.Now().Unix(), ClosedStatus)
- labelChangeOp := NewLabelChangeOperation(rene, time.Now().Unix(), []Label{"added"}, []Label{"removed"})
-
- opp.Append(createOp)
- opp.Append(setTitleOp)
- opp.Append(addCommentOp)
- opp.Append(setStatusOp)
- opp.Append(labelChangeOp)
-
- opMeta := NewSetTitleOp(rene, time.Now().Unix(), "title3", "title2")
- opMeta.SetMetadata("key", "value")
- opp.Append(opMeta)
-
- assert.Equal(t, 1, len(opMeta.Metadata))
-
- opFile := NewAddCommentOp(rene, time.Now().Unix(), "message", []repository.Hash{
- "abcdef",
- "ghijkl",
- })
- opp.Append(opFile)
-
- assert.Equal(t, 2, len(opFile.Files))
-
- data, err := json.Marshal(opp)
- assert.NoError(t, err)
-
- var opp2 *OperationPack
- err = json.Unmarshal(data, &opp2)
- assert.NoError(t, err)
-
- ensureIds(opp)
- ensureAuthors(t, opp, opp2)
-
- assert.Equal(t, opp, opp2)
-}
-
-func ensureIds(opp *OperationPack) {
- for _, op := range opp.Operations {
- op.Id()
- }
-}
-
-func ensureAuthors(t *testing.T, opp1 *OperationPack, opp2 *OperationPack) {
- require.Equal(t, len(opp1.Operations), len(opp2.Operations))
- for i := 0; i < len(opp1.Operations); i++ {
- op1 := opp1.Operations[i]
- op2 := opp2.Operations[i]
-
- // ensure we have equivalent authors (IdentityStub vs Identity) then
- // enforce equality
- require.Equal(t, op1.base().Author.Id(), op2.base().Author.Id())
- op1.base().Author = op2.base().Author
- }
-}
diff --git a/bug/operation_test.go b/bug/operation_test.go
index 20799bb1..619f2b43 100644
--- a/bug/operation_test.go
+++ b/bug/operation_test.go
@@ -11,7 +11,16 @@ import (
)
func TestValidate(t *testing.T) {
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
+ repo := repository.NewMockRepoClock()
+
+ makeIdentity := func(t *testing.T, name, email string) *identity.Identity {
+ i, err := identity.NewIdentity(repo, name, email)
+ require.NoError(t, err)
+ return i
+ }
+
+ rene := makeIdentity(t, "René Descartes", "rene@descartes.fr")
+
unix := time.Now().Unix()
good := []Operation{
@@ -30,13 +39,13 @@ func TestValidate(t *testing.T) {
bad := []Operation{
// opbase
- NewSetStatusOp(identity.NewIdentity("", "rene@descartes.fr"), unix, ClosedStatus),
- NewSetStatusOp(identity.NewIdentity("René Descartes\u001b", "rene@descartes.fr"), unix, ClosedStatus),
- NewSetStatusOp(identity.NewIdentity("René Descartes", "rene@descartes.fr\u001b"), unix, ClosedStatus),
- NewSetStatusOp(identity.NewIdentity("René \nDescartes", "rene@descartes.fr"), unix, ClosedStatus),
- NewSetStatusOp(identity.NewIdentity("René Descartes", "rene@\ndescartes.fr"), unix, ClosedStatus),
+ NewSetStatusOp(makeIdentity(t, "", "rene@descartes.fr"), unix, ClosedStatus),
+ NewSetStatusOp(makeIdentity(t, "René Descartes\u001b", "rene@descartes.fr"), unix, ClosedStatus),
+ NewSetStatusOp(makeIdentity(t, "René Descartes", "rene@descartes.fr\u001b"), unix, ClosedStatus),
+ NewSetStatusOp(makeIdentity(t, "René \nDescartes", "rene@descartes.fr"), unix, ClosedStatus),
+ NewSetStatusOp(makeIdentity(t, "René Descartes", "rene@\ndescartes.fr"), unix, ClosedStatus),
&CreateOperation{OpBase: OpBase{
- Author: rene,
+ Author_: rene,
UnixTime: 0,
OperationType: CreateOp,
},
@@ -68,7 +77,11 @@ func TestValidate(t *testing.T) {
}
func TestMetadata(t *testing.T) {
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
+ repo := repository.NewMockRepoClock()
+
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
+ require.NoError(t, err)
+
op := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil)
op.SetMetadata("key", "value")
@@ -83,13 +96,14 @@ func TestID(t *testing.T) {
defer repository.CleanupTestRepos(repo)
repos := []repository.ClockedRepo{
- repository.NewMockRepoForTest(),
+ repository.NewMockRepo(),
repo,
}
for _, repo := range repos {
- rene := identity.NewIdentity("René Descartes", "rene@descartes.fr")
- err := rene.Commit(repo)
+ rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr")
+ require.NoError(t, err)
+ err = rene.Commit(repo)
require.NoError(t, err)
b, op, err := Create(rene, time.Now().Unix(), "title", "message")
@@ -107,7 +121,7 @@ func TestID(t *testing.T) {
require.NoError(t, id2.Validate())
require.Equal(t, id1, id2)
- b2, err := ReadLocal(repo, b.Id())
+ b2, err := Read(repo, b.Id())
require.NoError(t, err)
op3 := b2.FirstOp()
diff --git a/bug/snapshot.go b/bug/snapshot.go
index 11df04b2..0005b930 100644
--- a/bug/snapshot.go
+++ b/bug/snapshot.go
@@ -28,6 +28,11 @@ type Snapshot struct {
// Return the Bug identifier
func (snap *Snapshot) Id() entity.Id {
+ if snap.id == "" {
+ // simply panic as it would be a coding error
+ // (using an id of a bug not stored yet)
+ panic("no id yet")
+ }
return snap.id
}
diff --git a/bug/sorting.go b/bug/sorting.go
index d1c370d3..2e64b92d 100644
--- a/bug/sorting.go
+++ b/bug/sorting.go
@@ -7,11 +7,11 @@ func (b BugsByCreationTime) Len() int {
}
func (b BugsByCreationTime) Less(i, j int) bool {
- if b[i].createTime < b[j].createTime {
+ if b[i].CreateLamportTime() < b[j].CreateLamportTime() {
return true
}
- if b[i].createTime > b[j].createTime {
+ if b[i].CreateLamportTime() > b[j].CreateLamportTime() {
return false
}
@@ -35,11 +35,11 @@ func (b BugsByEditTime) Len() int {
}
func (b BugsByEditTime) Less(i, j int) bool {
- if b[i].editTime < b[j].editTime {
+ if b[i].EditLamportTime() < b[j].EditLamportTime() {
return true
}
- if b[i].editTime > b[j].editTime {
+ if b[i].EditLamportTime() > b[j].EditLamportTime() {
return false
}
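
The sort helpers now compare the Lamport clocks exposed by the embedded DAG entity instead of the removed private fields. Sorting by a Lamport time is an ordinary sort.Interface over the logical clock value; a toy version:

    package main

    import (
        "fmt"
        "sort"
    )

    type lamportTime uint64

    type bug struct {
        id         string
        createTime lamportTime // logical clock, not wall-clock time
    }

    type byCreationTime []bug

    func (b byCreationTime) Len() int           { return len(b) }
    func (b byCreationTime) Less(i, j int) bool { return b[i].createTime < b[j].createTime }
    func (b byCreationTime) Swap(i, j int)      { b[i], b[j] = b[j], b[i] }

    func main() {
        bugs := byCreationTime{{"c", 3}, {"a", 1}, {"b", 2}}
        sort.Sort(bugs)
        fmt.Println(bugs) // [{a 1} {b 2} {c 3}]
    }
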
diff --git a/bug/with_snapshot.go b/bug/with_snapshot.go
index 2b2439df..9b706d61 100644
--- a/bug/with_snapshot.go
+++ b/bug/with_snapshot.go
@@ -47,12 +47,6 @@ func (b *WithSnapshot) Commit(repo repository.ClockedRepo) error {
return nil
}
- b.snap.id = b.Bug.id
+ b.snap.id = b.Bug.Id()
return nil
}
-
-// Merge intercept Bug.Merge() and clear the snapshot
-func (b *WithSnapshot) Merge(repo repository.Repo, other Interface) (bool, error) {
- b.snap = nil
- return b.Bug.Merge(repo, other)
-}