author     Santiago M. Mola <santi@mola.io>         2016-12-14 23:12:44 +0100
committer  Máximo Cuadros <mcuadros@gmail.com>      2016-12-14 23:12:44 +0100
commit     0af572dd21c0aa79d13745b633ee24ba6c4d6cf1 (patch)
tree       49e81e74e82d84fd88b2fc1e4b0dc7c7bfe9c40f /plumbing
parent     df0f38af83f972f026d7e14150f3d37b95f13484 (diff)
move plumbing from top level package to plumbing (#183)
* plumbing: rename Object -> EncodedObject.
* plumbing/storer: rename ObjectStorer -> EncodedObjectStorer.
* move difftree to plumbing/difftree.
* move diff -> utils/diff
* make Object/Tag/Blob/Tree/Commit/File depend on storer.
* Object and its implementations now depend only on storer.EncodedObjectStorer, not git.Repository.
* Tests are decoupled accordingly.
* move Object/Commit/File/Tag/Tree to plumbing/object.
* move checkClose to utils/ioutil.
* move RevListObjects to plumbing/revlist.Objects.
* move DiffTree to plumbing/difftree package.
* rename files with plural nouns to singular.
* plumbing/object: add GetBlob/GetCommit/GetTag/GetTree.
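
For readers updating code against these renames, here is a minimal, hypothetical sketch (not part of this patch) of how the post-rename API fits together; it assumes the storage/memory storer and the new plumbing/object helpers described in the bullet points above.

package main

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/storage/memory"
)

func main() {
	// memory.NewStorage satisfies storer.EncodedObjectStorer
	// (formerly storer.ObjectStorer).
	sto := memory.NewStorage()

	// plumbing.EncodedObject replaces plumbing.Object.
	obj := sto.NewEncodedObject()
	obj.SetType(plumbing.BlobObject)
	if _, err := sto.SetEncodedObject(obj); err != nil {
		panic(err)
	}

	// The new plumbing/object helpers (GetBlob/GetCommit/GetTag/GetTree)
	// decode a stored object from any EncodedObjectStorer.
	blob, err := object.GetBlob(sto, obj.Hash())
	if err != nil {
		panic(err)
	}
	fmt.Println(blob.Hash)
}

Apart from the new names, the call pattern is unchanged: storers hand out and persist encoded objects, and the plumbing/object helpers decode them into typed Blob/Commit/Tag/Tree values.
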
Diffstat (limited to 'plumbing')
-rw-r--r--  plumbing/difftree/difftree.go | 253
-rw-r--r--  plumbing/difftree/difftree_test.go | 430
-rw-r--r--  plumbing/difftree/internal/merkletrie/doc.go | 30
-rw-r--r--  plumbing/difftree/internal/merkletrie/frame.go | 79
-rw-r--r--  plumbing/difftree/internal/merkletrie/frame_test.go | 69
-rw-r--r--  plumbing/difftree/internal/merkletrie/iter.go | 167
-rw-r--r--  plumbing/difftree/internal/merkletrie/iter_fixtures_test.go | 330
-rw-r--r--  plumbing/difftree/internal/merkletrie/iter_test.go | 176
-rw-r--r--  plumbing/difftree/internal/merkletrie/node.go | 65
-rw-r--r--  plumbing/difftree/internal/merkletrie/node_test.go | 68
-rw-r--r--  plumbing/difftree/internal/merkletrie/noder.go | 20
-rw-r--r--  plumbing/error.go (renamed from plumbing/errors.go) | 0
-rw-r--r--  plumbing/format/packfile/decoder.go | 34
-rw-r--r--  plumbing/format/packfile/decoder_test.go | 12
-rw-r--r--  plumbing/format/packfile/diff_delta.go | 6
-rw-r--r--  plumbing/format/packfile/encoder.go | 6
-rw-r--r--  plumbing/format/packfile/encoder_test.go | 34
-rw-r--r--  plumbing/format/packfile/object_pack.go | 11
-rw-r--r--  plumbing/format/packfile/patch_delta.go | 2
-rw-r--r--  plumbing/object.go | 2
-rw-r--r--  plumbing/object/blob.go | 135
-rw-r--r--  plumbing/object/blob_test.go | 96
-rw-r--r--  plumbing/object/commit.go | 293
-rw-r--r--  plumbing/object/commit_test.go | 169
-rw-r--r--  plumbing/object/commit_walker.go | 67
-rw-r--r--  plumbing/object/commit_walker_test.go | 34
-rw-r--r--  plumbing/object/file.go | 116
-rw-r--r--  plumbing/object/file_test.go | 249
-rw-r--r--  plumbing/object/object.go | 217
-rw-r--r--  plumbing/object/object_test.go | 199
-rw-r--r--  plumbing/object/tag.go | 268
-rw-r--r--  plumbing/object/tag_test.go | 169
-rw-r--r--  plumbing/object/tree.go | 449
-rw-r--r--  plumbing/object/tree_test.go | 1425
-rw-r--r--  plumbing/revlist/revlist.go | 128
-rw-r--r--  plumbing/revlist/revlist_test.go | 150
-rw-r--r--  plumbing/storer/object.go | 131
-rw-r--r--  plumbing/storer/object_test.go | 44
38 files changed, 5996 insertions, 137 deletions
diff --git a/plumbing/difftree/difftree.go b/plumbing/difftree/difftree.go
new file mode 100644
index 0000000..3bc4d63
--- /dev/null
+++ b/plumbing/difftree/difftree.go
@@ -0,0 +1,253 @@
+package git
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "sort"
+ "strings"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/object"
+)
+
+type Action int
+
+func (a Action) String() string {
+ switch a {
+ case Insert:
+ return "Insert"
+ case Delete:
+ return "Delete"
+ case Modify:
+ return "Modify"
+ default:
+ panic(fmt.Sprintf("unsupported action: %d", a))
+ }
+}
+
+const (
+ Insert Action = iota
+ Delete
+ Modify
+)
+
+type Change struct {
+ Action
+ From ChangeEntry
+ To ChangeEntry
+}
+
+type ChangeEntry struct {
+ Name string
+ Tree *object.Tree
+ TreeEntry object.TreeEntry
+}
+
+func (c *Change) Files() (from, to *object.File, err error) {
+ if c.Action == Insert || c.Action == Modify {
+ to, err = c.To.Tree.TreeEntryFile(&c.To.TreeEntry)
+ if err != nil {
+ return
+ }
+
+ }
+
+ if c.Action == Delete || c.Action == Modify {
+ from, err = c.From.Tree.TreeEntryFile(&c.From.TreeEntry)
+ if err != nil {
+ return
+ }
+ }
+
+ return
+}
+
+func (c *Change) String() string {
+ return fmt.Sprintf("<Action: %s, Path: %s>", c.Action, c.name())
+}
+
+func (c *Change) name() string {
+ if c.From.Name != "" {
+ return c.From.Name
+ }
+
+ return c.To.Name
+}
+
+type Changes []*Change
+
+func newEmpty() Changes {
+ return make([]*Change, 0, 0)
+}
+
+func DiffTree(a, b *object.Tree) ([]*Change, error) {
+ if a == b {
+ return newEmpty(), nil
+ }
+
+ if a == nil || b == nil {
+ return newWithEmpty(a, b)
+ }
+
+ return newDiffTree(a, b)
+}
+
+func (c Changes) Len() int {
+ return len(c)
+}
+
+func (c Changes) Swap(i, j int) {
+ c[i], c[j] = c[j], c[i]
+}
+
+func (c Changes) Less(i, j int) bool {
+ return strings.Compare(c[i].name(), c[j].name()) < 0
+}
+
+func (c Changes) String() string {
+ var buffer bytes.Buffer
+ buffer.WriteString("[")
+ comma := ""
+ for _, v := range c {
+ buffer.WriteString(comma)
+ buffer.WriteString(v.String())
+ comma = ", "
+ }
+ buffer.WriteString("]")
+
+ return buffer.String()
+}
+
+func newWithEmpty(a, b *object.Tree) (Changes, error) {
+ changes := newEmpty()
+
+ var action Action
+ var tree *object.Tree
+ if a == nil {
+ action = Insert
+ tree = b
+ } else {
+ action = Delete
+ tree = a
+ }
+
+ w := object.NewTreeWalker(tree, true)
+ defer w.Close()
+
+ for {
+ path, entry, err := w.Next()
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, fmt.Errorf("cannot get next file: %s", err)
+ }
+
+ if entry.Mode.IsDir() {
+ continue
+ }
+
+ c := &Change{Action: action}
+
+ if action == Insert {
+ c.To.Name = path
+ c.To.TreeEntry = entry
+ c.To.Tree = tree
+ } else {
+ c.From.Name = path
+ c.From.TreeEntry = entry
+ c.From.Tree = tree
+ }
+
+ changes = append(changes, c)
+ }
+
+ return changes, nil
+}
+
+// FIXME: this is very inefficient, but correct.
+// The proper way to do this is to implement a diff-tree algorithm,
+// while taking advantage of the tree hashes to avoid traversing
+// subtrees when the hash is equal in both inputs.
+func newDiffTree(a, b *object.Tree) ([]*Change, error) {
+ var result []*Change
+
+ aChanges, err := newWithEmpty(a, nil)
+ if err != nil {
+ return nil, fmt.Errorf("cannot create nil-diff of source tree: %s", err)
+ }
+ sort.Sort(aChanges)
+
+ bChanges, err := newWithEmpty(nil, b)
+ if err != nil {
+ return nil, fmt.Errorf("cannot create nil-diff of destination tree: %s", err)
+ }
+ sort.Sort(bChanges)
+
+ for len(aChanges) > 0 && len(bChanges) > 0 {
+ switch comp := strings.Compare(aChanges[0].name(), bChanges[0].name()); {
+ case comp == 0: // append as "Modify" or ignore if not changed
+ modified, err := hasChange(a, b, aChanges[0].name())
+ if err != nil {
+ return nil, err
+ }
+
+ if modified {
+ c := mergeInsertAndDeleteIntoModify(aChanges[0], bChanges[0])
+ result = append(result, c)
+ }
+
+ aChanges = aChanges[1:]
+ bChanges = bChanges[1:]
+ case comp < 0: // the change in a comes first: emit it as a Delete
+ result = append(result, aChanges[0])
+ aChanges = aChanges[1:]
+ case comp > 0: // the change in b comes first: emit it as an Insert
+ result = append(result, bChanges[0])
+ bChanges = bChanges[1:]
+ }
+ }
+
+ // append all remaining changes in aChanges, if any, as deletes
+ // append all remaining changes in bChanges, if any, as inserts
+ result = append(result, aChanges...)
+ result = append(result, bChanges...)
+
+ return result, nil
+}
+
+func mergeInsertAndDeleteIntoModify(a, b *Change) *Change {
+ c := &Change{Action: Modify}
+ c.From.Name = a.From.Name
+ c.From.Tree = a.From.Tree
+ c.From.TreeEntry = a.From.TreeEntry
+ c.To.Name = b.To.Name
+ c.To.Tree = b.To.Tree
+ c.To.TreeEntry = b.To.TreeEntry
+
+ return c
+}
+
+func hasChange(a, b *object.Tree, path string) (bool, error) {
+ ha, err := hash(a, path)
+ if err != nil {
+ return false, err
+ }
+
+ hb, err := hash(b, path)
+ if err != nil {
+ return false, err
+ }
+
+ return ha != hb, nil
+}
+
+func hash(tree *object.Tree, path string) (plumbing.Hash, error) {
+ file, err := tree.File(path)
+ if err != nil {
+ var empty plumbing.Hash
+ return empty, fmt.Errorf("cannot find file %s in tree: %s", path, err)
+ }
+
+ return file.Hash, nil
+}
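
As an orientation aid (not part of the patch), a short usage sketch of the DiffTree API defined above, written as if it lived in this same package; the two *object.Tree arguments would be obtained elsewhere, for example via object.GetTree:

package git

import (
	"fmt"
	"sort"

	"gopkg.in/src-d/go-git.v4/plumbing/object"
)

// printTreeDiff is a hypothetical helper: it diffs two trees and
// prints every change, ordered by path.
func printTreeDiff(treeA, treeB *object.Tree) error {
	changes, err := DiffTree(treeA, treeB)
	if err != nil {
		return err
	}

	sort.Sort(Changes(changes)) // Changes implements sort.Interface
	for _, ch := range changes {
		from, to, err := ch.Files() // from is nil for Insert, to is nil for Delete
		if err != nil {
			return err
		}
		_, _ = from, to // *object.File values, available for further inspection
		fmt.Println(ch) // e.g. <Action: Modify, Path: gem_eval.rb>
	}

	return nil
}

Changes implements sort.Interface so results can be ordered by path, which is also what the tests in the next file rely on when comparing against their expectations.
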
diff --git a/plumbing/difftree/difftree_test.go b/plumbing/difftree/difftree_test.go
new file mode 100644
index 0000000..c95e879
--- /dev/null
+++ b/plumbing/difftree/difftree_test.go
@@ -0,0 +1,430 @@
+package git
+
+import (
+ "sort"
+ "testing"
+
+ "gopkg.in/src-d/go-git.v4/fixtures"
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/packfile"
+ "gopkg.in/src-d/go-git.v4/plumbing/object"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/storage/filesystem"
+ "gopkg.in/src-d/go-git.v4/storage/memory"
+
+ . "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) { TestingT(t) }
+
+type DiffTreeSuite struct {
+ fixtures.Suite
+ Storer storer.EncodedObjectStorer
+ Fixture *fixtures.Fixture
+ cache map[string]storer.EncodedObjectStorer
+}
+
+func (s *DiffTreeSuite) SetUpSuite(c *C) {
+ s.Suite.SetUpSuite(c)
+ s.Fixture = fixtures.Basic().One()
+ sto, err := filesystem.NewStorage(s.Fixture.DotGit())
+ c.Assert(err, IsNil)
+ s.Storer = sto
+ s.cache = make(map[string]storer.EncodedObjectStorer)
+}
+
+func (s *DiffTreeSuite) tree(c *C, h plumbing.Hash) *object.Tree {
+ t, err := object.GetTree(s.Storer, h)
+ c.Assert(err, IsNil)
+ return t
+}
+
+func (s *DiffTreeSuite) commitFromStorer(c *C, sto storer.EncodedObjectStorer,
+ h plumbing.Hash) *object.Commit {
+
+ commit, err := object.GetCommit(sto, h)
+ c.Assert(err, IsNil)
+ return commit
+}
+
+func (s *DiffTreeSuite) storageFromPackfile(f *fixtures.Fixture) storer.EncodedObjectStorer {
+ sto, ok := s.cache[f.URL]
+ if ok {
+ return sto
+ }
+
+ sto = memory.NewStorage()
+
+ pf := f.Packfile()
+
+ defer pf.Close()
+
+ n := packfile.NewScanner(pf)
+ d, err := packfile.NewDecoder(n, sto)
+ if err != nil {
+ panic(err)
+ }
+
+ _, err = d.Decode()
+ if err != nil {
+ panic(err)
+ }
+
+ s.cache[f.URL] = sto
+ return sto
+}
+
+var _ = Suite(&DiffTreeSuite{})
+
+func (s *DiffTreeSuite) TestActionString(c *C) {
+ expected := "Insert"
+ action := Insert
+ obtained := action.String()
+ c.Assert(obtained, Equals, expected)
+
+ expected = "Delete"
+ action = Delete
+ obtained = action.String()
+ c.Assert(obtained, Equals, expected)
+
+ expected = "Modify"
+ action = Modify
+ obtained = action.String()
+ c.Assert(obtained, Equals, expected)
+
+ action = 37
+ c.Assert(func() { _ = action.String() },
+ PanicMatches, "unsupported action: 37")
+}
+
+func (s *DiffTreeSuite) TestChangeFilesInsert(c *C) {
+ tree := s.tree(c, plumbing.NewHash("a8d315b2b1c615d43042c3a62402b8a54288cf5c"))
+
+ change := &Change{Action: Insert}
+ change.To.Name = "json/long.json"
+ change.To.Tree = tree
+ change.To.TreeEntry.Hash = plumbing.NewHash("49c6bb89b17060d7b4deacb7b338fcc6ea2352a9")
+
+ from, to, err := change.Files()
+ c.Assert(err, IsNil)
+ c.Assert(from, IsNil)
+ c.Assert(to.ID(), Equals, change.To.TreeEntry.Hash)
+}
+
+func (s *DiffTreeSuite) TestChangeFilesDelete(c *C) {
+ tree := s.tree(c, plumbing.NewHash("a8d315b2b1c615d43042c3a62402b8a54288cf5c"))
+
+ change := &Change{Action: Delete}
+ change.From.Name = "json/long.json"
+ change.From.Tree = tree
+ change.From.TreeEntry.Hash = plumbing.NewHash("49c6bb89b17060d7b4deacb7b338fcc6ea2352a9")
+
+ from, to, err := change.Files()
+ c.Assert(err, IsNil)
+ c.Assert(to, IsNil)
+ c.Assert(from.ID(), Equals, change.From.TreeEntry.Hash)
+}
+
+func (s *DiffTreeSuite) TestChangeFilesModify(c *C) {
+ tree := s.tree(c, plumbing.NewHash("a8d315b2b1c615d43042c3a62402b8a54288cf5c"))
+
+ change := &Change{Action: Modify}
+ change.To.Name = "json/long.json"
+ change.To.Tree = tree
+ change.To.TreeEntry.Hash = plumbing.NewHash("49c6bb89b17060d7b4deacb7b338fcc6ea2352a9")
+ change.From.Name = "json/long.json"
+ change.From.Tree = tree
+ change.From.TreeEntry.Hash = plumbing.NewHash("9a48f23120e880dfbe41f7c9b7b708e9ee62a492")
+
+ from, to, err := change.Files()
+ c.Assert(err, IsNil)
+ c.Assert(to.ID(), Equals, change.To.TreeEntry.Hash)
+ c.Assert(from.ID(), Equals, change.From.TreeEntry.Hash)
+}
+
+func (s *DiffTreeSuite) TestChangeString(c *C) {
+ expected := "<Action: Insert, Path: foo>"
+ change := &Change{Action: Insert}
+ change.From.Name = "foo"
+
+ obtained := change.String()
+ c.Assert(obtained, Equals, expected)
+}
+
+func (s *DiffTreeSuite) TestChangesString(c *C) {
+ expected := "[]"
+ changes := newEmpty()
+ obtained := changes.String()
+ c.Assert(obtained, Equals, expected)
+
+ expected = "[<Action: Modify, Path: bla>]"
+ changes = make([]*Change, 1)
+ changes[0] = &Change{Action: Modify}
+ changes[0].From.Name = "bla"
+
+ obtained = changes.String()
+ c.Assert(obtained, Equals, expected)
+
+ expected = "[<Action: Modify, Path: bla>, <Action: Insert, Path: foo/bar>]"
+ changes = make([]*Change, 2)
+ changes[0] = &Change{Action: Modify}
+ changes[0].From.Name = "bla"
+ changes[1] = &Change{Action: Insert}
+ changes[1].From.Name = "foo/bar"
+ obtained = changes.String()
+ c.Assert(obtained, Equals, expected)
+}
+
+type expectChange struct {
+ Action Action
+ Name string
+}
+
+func (s *DiffTreeSuite) TestDiffTree(c *C) {
+ for i, t := range []struct {
+ repository string // the repository URL, as used by fixtures.ByURL
+ commit1 string // the commit of the first tree
+ commit2 string // the commit of the second tree
+ expected []expectChange // the expected list of changes
+ }{{
+ "https://github.com/dezfowler/LiteMock.git",
+ "",
+ "",
+ []expectChange{},
+ }, {
+ "https://github.com/dezfowler/LiteMock.git",
+ "b7965eaa2c4f245d07191fe0bcfe86da032d672a",
+ "b7965eaa2c4f245d07191fe0bcfe86da032d672a",
+ []expectChange{},
+ }, {
+ "https://github.com/dezfowler/LiteMock.git",
+ "",
+ "b7965eaa2c4f245d07191fe0bcfe86da032d672a",
+ []expectChange{
+ {Action: Insert, Name: "README"},
+ },
+ }, {
+ "https://github.com/dezfowler/LiteMock.git",
+ "b7965eaa2c4f245d07191fe0bcfe86da032d672a",
+ "",
+ []expectChange{
+ {Action: Delete, Name: "README"},
+ },
+ }, {
+ "https://github.com/githubtraining/example-branches.git",
+ "",
+ "f0eb272cc8f77803478c6748103a1450aa1abd37",
+ []expectChange{
+ {Action: Insert, Name: "README.md"},
+ },
+ }, {
+ "https://github.com/githubtraining/example-branches.git",
+ "f0eb272cc8f77803478c6748103a1450aa1abd37",
+ "",
+ []expectChange{
+ {Action: Delete, Name: "README.md"},
+ },
+ }, {
+ "https://github.com/githubtraining/example-branches.git",
+ "f0eb272cc8f77803478c6748103a1450aa1abd37",
+ "f0eb272cc8f77803478c6748103a1450aa1abd37",
+ []expectChange{},
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "",
+ "9608eed92b3839b06ebf72d5043da547de10ce85",
+ []expectChange{
+ {Action: Insert, Name: "README"},
+ {Action: Insert, Name: "gem_builder.rb"},
+ {Action: Insert, Name: "gem_eval.rb"},
+ },
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "9608eed92b3839b06ebf72d5043da547de10ce85",
+ "",
+ []expectChange{
+ {Action: Delete, Name: "README"},
+ {Action: Delete, Name: "gem_builder.rb"},
+ {Action: Delete, Name: "gem_eval.rb"},
+ },
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "9608eed92b3839b06ebf72d5043da547de10ce85",
+ "9608eed92b3839b06ebf72d5043da547de10ce85",
+ []expectChange{},
+ }, {
+ "https://github.com/toqueteos/ts3.git",
+ "",
+ "764e914b75d6d6df1fc5d832aa9840f590abf1bb",
+ []expectChange{
+ {Action: Insert, Name: "README.markdown"},
+ {Action: Insert, Name: "examples/bot.go"},
+ {Action: Insert, Name: "examples/raw_shell.go"},
+ {Action: Insert, Name: "helpers.go"},
+ {Action: Insert, Name: "ts3.go"},
+ },
+ }, {
+ "https://github.com/toqueteos/ts3.git",
+ "764e914b75d6d6df1fc5d832aa9840f590abf1bb",
+ "",
+ []expectChange{
+ {Action: Delete, Name: "README.markdown"},
+ {Action: Delete, Name: "examples/bot.go"},
+ {Action: Delete, Name: "examples/raw_shell.go"},
+ {Action: Delete, Name: "helpers.go"},
+ {Action: Delete, Name: "ts3.go"},
+ },
+ }, {
+ "https://github.com/toqueteos/ts3.git",
+ "764e914b75d6d6df1fc5d832aa9840f590abf1bb",
+ "764e914b75d6d6df1fc5d832aa9840f590abf1bb",
+ []expectChange{},
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "9608eed92b3839b06ebf72d5043da547de10ce85",
+ "6c41e05a17e19805879689414026eb4e279f7de0",
+ []expectChange{
+ {Action: Modify, Name: "gem_eval.rb"},
+ },
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "6c41e05a17e19805879689414026eb4e279f7de0",
+ "89be3aac2f178719c12953cc9eaa23441f8d9371",
+ []expectChange{
+ {Action: Modify, Name: "gem_eval.rb"},
+ {Action: Insert, Name: "gem_eval_test.rb"},
+ {Action: Insert, Name: "security.rb"},
+ {Action: Insert, Name: "security_test.rb"},
+ },
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "89be3aac2f178719c12953cc9eaa23441f8d9371",
+ "597240b7da22d03ad555328f15abc480b820acc0",
+ []expectChange{
+ {Action: Modify, Name: "gem_eval.rb"},
+ },
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "597240b7da22d03ad555328f15abc480b820acc0",
+ "0260380e375d2dd0e1a8fcab15f91ce56dbe778e",
+ []expectChange{
+ {Action: Modify, Name: "gem_eval.rb"},
+ {Action: Modify, Name: "gem_eval_test.rb"},
+ {Action: Insert, Name: "lazy_dir.rb"},
+ {Action: Insert, Name: "lazy_dir_test.rb"},
+ {Action: Modify, Name: "security.rb"},
+ {Action: Modify, Name: "security_test.rb"},
+ },
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "0260380e375d2dd0e1a8fcab15f91ce56dbe778e",
+ "597240b7da22d03ad555328f15abc480b820acc0",
+ []expectChange{
+ {Action: Modify, Name: "gem_eval.rb"},
+ {Action: Modify, Name: "gem_eval_test.rb"},
+ {Action: Delete, Name: "lazy_dir.rb"},
+ {Action: Delete, Name: "lazy_dir_test.rb"},
+ {Action: Modify, Name: "security.rb"},
+ {Action: Modify, Name: "security_test.rb"},
+ },
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "0260380e375d2dd0e1a8fcab15f91ce56dbe778e",
+ "ca9fd470bacb6262eb4ca23ee48bb2f43711c1ff",
+ []expectChange{
+ {Action: Modify, Name: "gem_eval.rb"},
+ {Action: Modify, Name: "security.rb"},
+ {Action: Modify, Name: "security_test.rb"},
+ },
+ }, {
+ "https://github.com/github/gem-builder.git",
+ "fe3c86745f887c23a0d38c85cfd87ca957312f86",
+ "b7e3f636febf7a0cd3ab473b6d30081786d2c5b6",
+ []expectChange{
+ {Action: Modify, Name: "gem_eval.rb"},
+ {Action: Modify, Name: "gem_eval_test.rb"},
+ {Action: Insert, Name: "git_mock"},
+ {Action: Modify, Name: "lazy_dir.rb"},
+ {Action: Modify, Name: "lazy_dir_test.rb"},
+ {Action: Modify, Name: "security.rb"},
+ },
+ }, {
+ "https://github.com/rumpkernel/rumprun-xen.git",
+ "1831e47b0c6db750714cd0e4be97b5af17fb1eb0",
+ "51d8515578ea0c88cc8fc1a057903675cf1fc16c",
+ []expectChange{
+ {Action: Modify, Name: "Makefile"},
+ {Action: Modify, Name: "netbsd_init.c"},
+ {Action: Modify, Name: "rumphyper_stubs.c"},
+ {Action: Delete, Name: "sysproxy.c"},
+ },
+ }, {
+ "https://github.com/rumpkernel/rumprun-xen.git",
+ "1831e47b0c6db750714cd0e4be97b5af17fb1eb0",
+ "e13e678f7ee9badd01b120889e0ec5fdc8ae3802",
+ []expectChange{
+ {Action: Modify, Name: "app-tools/rumprun"},
+ },
+ }} {
+ f := fixtures.ByURL(t.repository).One()
+ sto := s.storageFromPackfile(f)
+
+ var tree1, tree2 *object.Tree
+ var err error
+ if t.commit1 != "" {
+ tree1, err = s.commitFromStorer(c, sto,
+ plumbing.NewHash(t.commit1)).Tree()
+ c.Assert(err, IsNil,
+ Commentf("subtest %d: unable to retrieve tree from commit %s and repo %s: %s", i, t.commit1, t.repository, err))
+ }
+
+ if t.commit2 != "" {
+ tree2, err = s.commitFromStorer(c, sto,
+ plumbing.NewHash(t.commit2)).Tree()
+ c.Assert(err, IsNil,
+ Commentf("subtest %d: unable to retrieve tree from commit %s and repo %s", i, t.commit2, t.repository, err))
+ }
+
+ obtained, err := DiffTree(tree1, tree2)
+ c.Assert(err, IsNil,
+ Commentf("subtest %d: unable to calculate difftree: %s", i, err))
+ c.Assert(equalChanges(obtained, t.expected), Equals, true,
+ Commentf("subtest:%d\nrepo=%s\ncommit1=%s\ncommit2=%s\nexpected=%s\nobtained=%s",
+ i, t.repository, t.commit1, t.commit2, t.expected, obtained))
+
+ assertChanges(obtained, c)
+ }
+}
+
+func assertChanges(a Changes, c *C) {
+ for _, changes := range a {
+ switch changes.Action {
+ case Insert:
+ c.Assert(changes.From.Tree, IsNil)
+ c.Assert(changes.To.Tree, NotNil)
+ case Delete:
+ c.Assert(changes.From.Tree, NotNil)
+ c.Assert(changes.To.Tree, IsNil)
+ case Modify:
+ c.Assert(changes.From.Tree, NotNil)
+ c.Assert(changes.To.Tree, NotNil)
+ }
+ }
+}
+
+func equalChanges(a Changes, b []expectChange) bool {
+ if len(a) != len(b) {
+ return false
+ }
+
+ sort.Sort(a)
+
+ for i, va := range a {
+ vb := b[i]
+ if va.Action != vb.Action || va.name() != vb.Name {
+ return false
+ }
+ }
+
+ return true
+}
diff --git a/plumbing/difftree/internal/merkletrie/doc.go b/plumbing/difftree/internal/merkletrie/doc.go
new file mode 100644
index 0000000..f8c3b2f
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/doc.go
@@ -0,0 +1,30 @@
+package merkletrie
+
+/*
+Package merkletrie gives support for n-ary trees that are at the same
+time Merkle trees and Radix trees, and provides an efficient tree
+comparison algorithm for them.
+
+Git trees are Radix n-ary trees in virtue of the names of their
+tree entries. At the same time, git trees are Merkle trees thanks to
+their hashes.
+
+When comparing git trees, the simple approach of alphabetically sorting
+their elements and comparing the resulting lists is not enough as it
+depends linearly on the number of files in the trees: when a directory
+has lots of files but none of them has been modified, this approach is
+very expensive. We can do better by pruning whole directories that
+have not changed, just by looking at their hashes. This package
+provides the tools to do exactly that.
+
+This package defines Radix-Merkle trees as nodes that should have:
+- a hash: the Merkle part of the Radix-Merkle tree
+- a key: the Radix part of the Radix-Merkle tree
+
+The Merkle hash condition is not enforced by this package, though. This
+means that node hashes don't have to take into account the hashes of
+their children, which is convenient for testing purposes.
+
+Nodes in the Radix-Merkle tree are abstracted by the Noder interface.
+The intended use is that git.Tree implements this interface.
+*/
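
To make the pruning idea above concrete, a tiny illustrative sketch (not part of the patch): thanks to the Merkle property, two nodes with equal hashes root identical subtrees, so a comparison can skip them without walking their children.

package merkletrie

import "bytes"

// equalSubtrees is a hypothetical helper: if two Noders carry the
// same hash, the subtrees below them are identical and need not be
// traversed.
func equalSubtrees(a, b Noder) bool {
	return bytes.Equal(a.Hash(), b.Hash())
}
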
diff --git a/plumbing/difftree/internal/merkletrie/frame.go b/plumbing/difftree/internal/merkletrie/frame.go
new file mode 100644
index 0000000..a40c6ad
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/frame.go
@@ -0,0 +1,79 @@
+package merkletrie
+
+import (
+ "bytes"
+ "fmt"
+)
+
+const sep = "/"
+
+// A frame represents siblings in a trie, along with the path to get to
+// them. For example the frame for the node with key `b` in this trie:
+//
+// a
+// / \
+// / \
+// / \
+// b c
+// /|\ / \
+// y z x d e
+// |
+// g
+//
+// would be:
+//
+// f := frame{
+// base: "a/b", // path to the siblings
+// stack: []Node{z, y, x} // in reverse alphabetical order
+// }
+type frame struct {
+ base string // absolute key of the siblings' parent
+ stack []Noder // siblings, sorted in reverse alphabetical order by key
+}
+
+// newFrame returns a frame for the children of a node n.
+func newFrame(parentAbsoluteKey string, n Noder) *frame {
+ return &frame{
+ base: parentAbsoluteKey + sep + n.Key(),
+ stack: n.Children(),
+ }
+}
+
+func (f *frame) String() string {
+ var buf bytes.Buffer
+ _, _ = buf.WriteString(fmt.Sprintf("base=%q, stack=[", f.base))
+
+ sep := ""
+ for _, e := range f.stack {
+ _, _ = buf.WriteString(sep)
+ sep = ", "
+ _, _ = buf.WriteString(fmt.Sprintf("%q", e.Key()))
+ }
+
+ _ = buf.WriteByte(']')
+
+ return buf.String()
+}
+
+func (f *frame) top() (Noder, bool) {
+ if len(f.stack) == 0 {
+ return nil, false
+ }
+
+ top := len(f.stack) - 1
+
+ return f.stack[top], true
+}
+
+func (f *frame) pop() (Noder, bool) {
+ if len(f.stack) == 0 {
+ return nil, false
+ }
+
+ top := len(f.stack) - 1
+ ret := f.stack[top]
+ f.stack[top] = nil
+ f.stack = f.stack[:top]
+
+ return ret, true
+}
diff --git a/plumbing/difftree/internal/merkletrie/frame_test.go b/plumbing/difftree/internal/merkletrie/frame_test.go
new file mode 100644
index 0000000..0ef036a
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/frame_test.go
@@ -0,0 +1,69 @@
+package merkletrie
+
+import . "gopkg.in/check.v1"
+
+type FrameSuite struct{}
+
+var _ = Suite(&FrameSuite{})
+
+func (s *FrameSuite) TestNewFrameFromLeaf(c *C) {
+ n := newNode(
+ []byte("hash"),
+ "key",
+ []*node{},
+ )
+
+ frame := newFrame("foo", n)
+
+ expectedString := `base="foo/key", stack=[]`
+ c.Assert(frame.String(), Equals, expectedString)
+
+ obtainedTopNode, obtainedTopOK := frame.top()
+ c.Assert(obtainedTopNode, IsNil)
+ c.Assert(obtainedTopOK, Equals, false)
+
+ obtainedPopNode, obtainedPopOK := frame.pop()
+ c.Assert(obtainedPopNode, IsNil)
+ c.Assert(obtainedPopOK, Equals, false)
+}
+
+func (s *FrameSuite) TestNewFrameFromParent(c *C) {
+ leaf0 := newNode([]byte("leaf0 hash"), "leaf0 key", []*node{})
+ leaf1 := newNode([]byte("leaf1 hash"), "leaf1 key", []*node{})
+ leaf2 := newNode([]byte("leaf2 hash"), "leaf2 key", []*node{})
+ leaf3 := newNode([]byte("leaf3 hash"), "leaf3 key", []*node{})
+ parent := newNode(
+ []byte("parent hash"),
+ "parent key",
+ []*node{leaf3, leaf0, leaf2, leaf1}, // not alphabetically sorted
+ )
+
+ frame := newFrame("foo", parent)
+
+ expectedString := `base="foo/parent key", stack=["leaf3 key", "leaf2 key", "leaf1 key", "leaf0 key"]`
+ c.Assert(frame.String(), Equals, expectedString)
+
+ checkTopAndPop(c, frame, leaf0, true)
+ checkTopAndPop(c, frame, leaf1, true)
+ checkTopAndPop(c, frame, leaf2, true)
+ checkTopAndPop(c, frame, leaf3, true)
+ checkTopAndPop(c, frame, nil, false)
+}
+
+func checkTopAndPop(c *C, f *frame, expectedNode *node, expectedOK bool) {
+ n, ok := f.top()
+ if expectedNode == nil {
+ c.Assert(n, IsNil)
+ } else {
+ c.Assert(n, DeepEquals, expectedNode)
+ }
+ c.Assert(ok, Equals, expectedOK)
+
+ n, ok = f.pop()
+ if expectedNode == nil {
+ c.Assert(n, IsNil)
+ } else {
+ c.Assert(n, DeepEquals, expectedNode)
+ }
+ c.Assert(ok, Equals, expectedOK)
+}
diff --git a/plumbing/difftree/internal/merkletrie/iter.go b/plumbing/difftree/internal/merkletrie/iter.go
new file mode 100644
index 0000000..e175966
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/iter.go
@@ -0,0 +1,167 @@
+package merkletrie
+
+// Iter is a radix tree iterator that will traverse the trie in
+// depth-first pre-order. Entries are traversed in (case-sensitive)
+// alphabetical order for each level.
+//
+// This is the kind of traversal you will expect when listing
+// ordinary files and directories recursively, for example:
+//
+//        Trie           Traversal order
+//        ----           ---------------
+//         .
+//       / | \           a
+//      /  |  \          b
+//     b   a   z   ===>  b/a
+//    / \                b/c
+//   c   a               z
+//
+//
+// The Step method will return the next item, the Next method will do
+// the same but without descending deeper into the tree (i.e. skipping
+// the contents of "directories").
+//
+// The name of the type and its methods are based on the well known "next"
+// and "step" operations, quite common in debuggers, like gdb.
+type Iter struct {
+ // tells if the iteration has started.
+ hasStarted bool
+ // Each level of the tree is represented as a frame, this stack
+ // keeps track of the frames wrapping the current iterator position.
+ // The iterator will "step" into a node by adding its frame to the
+ // stack, or go to the next element at the same level by popping the
+ // current frame.
+ frameStack []*frame
+}
+
+// NewIter returns a new iterator for the trie with its root at n.
+func NewIter(n Noder) *Iter {
+ ret := &Iter{}
+ ret.push(newFrame("", n))
+
+ return ret
+}
+
+func (iter *Iter) top() (*frame, bool) {
+ if len(iter.frameStack) == 0 {
+ return nil, false
+ }
+
+ top := len(iter.frameStack) - 1
+
+ return iter.frameStack[top], true
+}
+
+func (iter *Iter) pop() (*frame, bool) {
+ if len(iter.frameStack) == 0 {
+ return nil, false
+ }
+
+ top := len(iter.frameStack) - 1
+ ret := iter.frameStack[top]
+ iter.frameStack[top] = nil
+ iter.frameStack = iter.frameStack[:top]
+
+ return ret, true
+}
+
+func (iter *Iter) push(f *frame) {
+ iter.frameStack = append(iter.frameStack, f)
+}
+
+const (
+ descend = true
+ dontDescend = false
+)
+
+// Next returns the next node without descending deeper into the tree
+// and true. If there are no more entries it returns nil and false.
+func (iter *Iter) Next() (Noder, bool) {
+ return iter.advance(dontDescend)
+}
+
+// Step returns the next node in the tree, descending deeper into it if
+// needed. If there are no more nodes in the tree, it returns nil and
+// false.
+func (iter *Iter) Step() (Noder, bool) {
+ return iter.advance(descend)
+}
+
+// advances the iterator in whatever direction you want: descend or
+// dontDescend.
+func (iter *Iter) advance(mustDescend bool) (Noder, bool) {
+ node, ok := iter.current()
+ if !ok {
+ return nil, false
+ }
+
+ // The first time we just return the current node.
+ if !iter.hasStarted {
+ iter.hasStarted = true
+ return node, ok
+ }
+ // following advances will involve dropping already seen nodes
+ // or getting into their children
+
+ ignoreChildren := node.NumChildren() == 0 || !mustDescend
+ if ignoreChildren {
+ // if we must ignore the current node children, just drop
+ // it and find the next one in the existing frames.
+ _ = iter.drop()
+ node, ok = iter.current()
+ return node, ok
+ }
+
+ // if we must descend into the current's node children, drop the
+ // parent and add a new frame with its children.
+ _ = iter.drop()
+ iter.push(newFrame(node.Key(), node))
+ node, _ = iter.current()
+
+ return node, true
+}
+
+// returns the current node (i.e. the one at the top of the stack of the
+// current frame) and true, or nil and false if there are no more nodes.
+func (iter *Iter) current() (Noder, bool) {
+ f, ok := iter.top()
+ if !ok {
+ return nil, false
+ }
+
+ n, ok := f.top()
+ if !ok {
+ return nil, false
+ }
+
+ return n, true
+}
+
+// removes the current node and all the frames that become empty as a
+// consequence of this action. It returns true if something was dropped,
+// and false if there were no more nodes in the iterator.
+func (iter *Iter) drop() bool {
+ frame, ok := iter.top()
+ if !ok {
+ return false
+ }
+
+ _, ok = frame.pop()
+ if !ok {
+ return false
+ }
+
+ for { // remove empty frames
+ if len(frame.stack) != 0 {
+ break
+ }
+
+ _, _ = iter.pop()
+ frame, ok = iter.top()
+ if !ok {
+ break
+ }
+ }
+
+ return true
+}
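
For reference (not part of the patch), a small sketch of how the iterator above might be driven: Step descends into every node below the root, while Next would skip the children of the node it just returned.

package merkletrie

import "fmt"

// printKeys is a hypothetical helper: it walks the whole trie below
// root in depth-first pre-order using Step and prints every key.
func printKeys(root Noder) {
	iter := NewIter(root)
	for {
		node, ok := iter.Step()
		if !ok {
			return
		}
		fmt.Println(node.Key())
	}
}
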
diff --git a/plumbing/difftree/internal/merkletrie/iter_fixtures_test.go b/plumbing/difftree/internal/merkletrie/iter_fixtures_test.go
new file mode 100644
index 0000000..20bddaf
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/iter_fixtures_test.go
@@ -0,0 +1,330 @@
+package merkletrie
+
+// this file contains fixtures for testing the Iter.
+//
+// - iter... functions return iterators for newly created trees,
+// for example:
+//
+// + iterLeaf returns an iterator for a simple tree with just the root.
+//
+// + iter2Horizontal returns an iterator for a tree with 2 nodes, both
+// children of the root.
+//
+// - runs... contains sets of tests, indexed by a string that helps
+// to understand each test: "nsn" means next, then step, then next
+// again. The test also contains the expected keys of the nodes you
+// will get when calling the operations over the corresponding trees.
+// Example: runs2Horizontal goes with iter2HorizontalSorted, and so on.
+
+func iterLeaf() *Iter {
+ root := newNode(hash, "root", empty)
+ return NewIter(root)
+}
+
+var runs0 = map[string][]test{
+ "nn": {{next, ""}, {next, ""}},
+ "ns": {{next, ""}, {step, ""}},
+ "sn": {{step, ""}, {next, ""}},
+ "ss": {{step, ""}, {step, ""}},
+}
+
+// root
+// |
+// a
+func iter1() *Iter {
+ a := newNode(hash, "a", empty)
+ root := newNode(hash, "root", []*node{a})
+ return NewIter(root)
+}
+
+var runs1 = map[string][]test{
+ "nn": {{next, "a"}, {next, ""}},
+ "ns": {{next, "a"}, {step, ""}},
+ "sn": {{step, "a"}, {next, ""}},
+ "ss": {{step, "a"}, {step, ""}},
+}
+
+// root
+// / \
+// a b
+func iter2HorizontalSorted() *Iter {
+ a := newNode(hash, "a", empty)
+ b := newNode(hash, "b", empty)
+ root := newNode(hash, "root", []*node{a, b})
+ return NewIter(root)
+}
+
+// root
+// / \
+// b a
+func iter2HorizontalReverse() *Iter {
+ a := newNode(hash, "a", empty)
+ b := newNode(hash, "b", empty)
+ root := newNode(hash, "root", []*node{b, a})
+ return NewIter(root)
+}
+
+var runs2Horizontal = map[string][]test{
+ "nnn": {{next, "a"}, {next, "b"}, {next, ""}},
+ "nns": {{next, "a"}, {next, "b"}, {step, ""}},
+ "nsn": {{next, "a"}, {step, "b"}, {next, ""}},
+ "nss": {{next, "a"}, {step, "b"}, {step, ""}},
+ "snn": {{step, "a"}, {next, "b"}, {next, ""}},
+ "sns": {{step, "a"}, {next, "b"}, {step, ""}},
+ "ssn": {{step, "a"}, {step, "b"}, {next, ""}},
+ "sss": {{step, "a"}, {step, "b"}, {step, ""}},
+}
+
+// root
+// |
+// a
+// |
+// b
+func iter2VerticalSorted() *Iter {
+ b := newNode(hash, "b", empty)
+ a := newNode(hash, "a", []*node{b})
+ root := newNode(hash, "root", []*node{a})
+ return NewIter(root)
+}
+
+var runs2VerticalSorted = map[string][]test{
+ "nnn": {{next, "a"}, {next, ""}, {next, ""}},
+ "nns": {{next, "a"}, {next, ""}, {step, ""}},
+ "nsn": {{next, "a"}, {step, "b"}, {next, ""}},
+ "nss": {{next, "a"}, {step, "b"}, {step, ""}},
+ "snn": {{step, "a"}, {next, ""}, {next, ""}},
+ "sns": {{step, "a"}, {next, ""}, {step, ""}},
+ "ssn": {{step, "a"}, {step, "b"}, {next, ""}},
+ "sss": {{step, "a"}, {step, "b"}, {step, ""}},
+}
+
+// root
+// |
+// b
+// |
+// a
+func iter2VerticalReverse() *Iter {
+ a := newNode(hash, "a", empty)
+ b := newNode(hash, "b", []*node{a})
+ root := newNode(hash, "root", []*node{b})
+ return NewIter(root)
+}
+
+var runs2VerticalReverse = map[string][]test{
+ "nnn": {{next, "b"}, {next, ""}, {next, ""}},
+ "nns": {{next, "b"}, {next, ""}, {step, ""}},
+ "nsn": {{next, "b"}, {step, "a"}, {next, ""}},
+ "nss": {{next, "b"}, {step, "a"}, {step, ""}},
+ "snn": {{step, "b"}, {next, ""}, {next, ""}},
+ "sns": {{step, "b"}, {next, ""}, {step, ""}},
+ "ssn": {{step, "b"}, {step, "a"}, {next, ""}},
+ "sss": {{step, "b"}, {step, "a"}, {step, ""}},
+}
+
+// root
+// /|\
+// c a b
+func iter3Horizontal() *Iter {
+ a := newNode(hash, "a", empty)
+ b := newNode(hash, "b", empty)
+ c := newNode(hash, "c", empty)
+ root := newNode(hash, "root", []*node{c, a, b})
+ return NewIter(root)
+}
+
+var runs3Horizontal = map[string][]test{
+ "nnnn": {{next, "a"}, {next, "b"}, {next, "c"}, {next, ""}},
+ "nnns": {{next, "a"}, {next, "b"}, {next, "c"}, {step, ""}},
+ "nnsn": {{next, "a"}, {next, "b"}, {step, "c"}, {next, ""}},
+ "nnss": {{next, "a"}, {next, "b"}, {step, "c"}, {step, ""}},
+ "nsnn": {{next, "a"}, {step, "b"}, {next, "c"}, {next, ""}},
+ "nsns": {{next, "a"}, {step, "b"}, {next, "c"}, {step, ""}},
+ "nssn": {{next, "a"}, {step, "b"}, {step, "c"}, {next, ""}},
+ "nsss": {{next, "a"}, {step, "b"}, {step, "c"}, {step, ""}},
+ "snnn": {{step, "a"}, {next, "b"}, {next, "c"}, {next, ""}},
+ "snns": {{step, "a"}, {next, "b"}, {next, "c"}, {step, ""}},
+ "snsn": {{step, "a"}, {next, "b"}, {step, "c"}, {next, ""}},
+ "snss": {{step, "a"}, {next, "b"}, {step, "c"}, {step, ""}},
+ "ssnn": {{step, "a"}, {step, "b"}, {next, "c"}, {next, ""}},
+ "ssns": {{step, "a"}, {step, "b"}, {next, "c"}, {step, ""}},
+ "sssn": {{step, "a"}, {step, "b"}, {step, "c"}, {next, ""}},
+ "ssss": {{step, "a"}, {step, "b"}, {step, "c"}, {step, ""}},
+}
+
+// root
+// |
+// b
+// |
+// c
+// |
+// a
+func iter3Vertical() *Iter {
+ a := newNode(hash, "a", empty)
+ c := newNode(hash, "c", []*node{a})
+ b := newNode(hash, "b", []*node{c})
+ root := newNode(hash, "root", []*node{b})
+ return NewIter(root)
+}
+
+var runs3Vertical = map[string][]test{
+ "nnnn": {{next, "b"}, {next, ""}, {next, ""}, {next, ""}},
+ "nnns": {{next, "b"}, {next, ""}, {next, ""}, {step, ""}},
+ "nnsn": {{next, "b"}, {next, ""}, {step, ""}, {next, ""}},
+ "nnss": {{next, "b"}, {next, ""}, {step, ""}, {step, ""}},
+ "nsnn": {{next, "b"}, {step, "c"}, {next, ""}, {next, ""}},
+ "nsns": {{next, "b"}, {step, "c"}, {next, ""}, {step, ""}},
+ "nssn": {{next, "b"}, {step, "c"}, {step, "a"}, {next, ""}},
+ "nsss": {{next, "b"}, {step, "c"}, {step, "a"}, {step, ""}},
+ "snnn": {{step, "b"}, {next, ""}, {next, ""}, {next, ""}},
+ "snns": {{step, "b"}, {next, ""}, {next, ""}, {step, ""}},
+ "snsn": {{step, "b"}, {next, ""}, {step, ""}, {next, ""}},
+ "snss": {{step, "b"}, {next, ""}, {step, ""}, {step, ""}},
+ "ssnn": {{step, "b"}, {step, "c"}, {next, ""}, {next, ""}},
+ "ssns": {{step, "b"}, {step, "c"}, {next, ""}, {step, ""}},
+ "sssn": {{step, "b"}, {step, "c"}, {step, "a"}, {next, ""}},
+ "ssss": {{step, "b"}, {step, "c"}, {step, "a"}, {step, ""}},
+}
+
+// root
+// / \
+// c a
+// |
+// b
+func iter3Mix1() *Iter {
+ a := newNode(hash, "a", empty)
+ b := newNode(hash, "b", empty)
+ c := newNode(hash, "c", []*node{b})
+ root := newNode(hash, "root", []*node{c, a})
+ return NewIter(root)
+}
+
+var runs3Mix1 = map[string][]test{
+ "nnnn": {{next, "a"}, {next, "c"}, {next, ""}, {next, ""}},
+ "nnns": {{next, "a"}, {next, "c"}, {next, ""}, {step, ""}},
+ "nnsn": {{next, "a"}, {next, "c"}, {step, "b"}, {next, ""}},
+ "nnss": {{next, "a"}, {next, "c"}, {step, "b"}, {step, ""}},
+ "nsnn": {{next, "a"}, {step, "c"}, {next, ""}, {next, ""}},
+ "nsns": {{next, "a"}, {step, "c"}, {next, ""}, {step, ""}},
+ "nssn": {{next, "a"}, {step, "c"}, {step, "b"}, {next, ""}},
+ "nsss": {{next, "a"}, {step, "c"}, {step, "b"}, {step, ""}},
+ "snnn": {{step, "a"}, {next, "c"}, {next, ""}, {next, ""}},
+ "snns": {{step, "a"}, {next, "c"}, {next, ""}, {step, ""}},
+ "snsn": {{step, "a"}, {next, "c"}, {step, "b"}, {next, ""}},
+ "snss": {{step, "a"}, {next, "c"}, {step, "b"}, {step, ""}},
+ "ssnn": {{step, "a"}, {step, "c"}, {next, ""}, {next, ""}},
+ "ssns": {{step, "a"}, {step, "c"}, {next, ""}, {step, ""}},
+ "sssn": {{step, "a"}, {step, "c"}, {step, "b"}, {next, ""}},
+ "ssss": {{step, "a"}, {step, "c"}, {step, "b"}, {step, ""}},
+}
+
+// root
+// / \
+// b a
+// |
+// c
+func iter3Mix2() *Iter {
+ b := newNode(hash, "b", empty)
+ c := newNode(hash, "c", empty)
+ a := newNode(hash, "a", []*node{c})
+ root := newNode(hash, "root", []*node{b, a})
+ return NewIter(root)
+}
+
+var runs3Mix2 = map[string][]test{
+ "nnnn": {{next, "a"}, {next, "b"}, {next, ""}, {next, ""}},
+ "nnns": {{next, "a"}, {next, "b"}, {next, ""}, {step, ""}},
+ "nnsn": {{next, "a"}, {next, "b"}, {step, ""}, {next, ""}},
+ "nnss": {{next, "a"}, {next, "b"}, {step, ""}, {step, ""}},
+ "nsnn": {{next, "a"}, {step, "c"}, {next, "b"}, {next, ""}},
+ "nsns": {{next, "a"}, {step, "c"}, {next, "b"}, {step, ""}},
+ "nssn": {{next, "a"}, {step, "c"}, {step, "b"}, {next, ""}},
+ "nsss": {{next, "a"}, {step, "c"}, {step, "b"}, {step, ""}},
+ "snnn": {{step, "a"}, {next, "b"}, {next, ""}, {next, ""}},
+ "snns": {{step, "a"}, {next, "b"}, {next, ""}, {step, ""}},
+ "snsn": {{step, "a"}, {next, "b"}, {step, ""}, {next, ""}},
+ "snss": {{step, "a"}, {next, "b"}, {step, ""}, {step, ""}},
+ "ssnn": {{step, "a"}, {step, "c"}, {next, "b"}, {next, ""}},
+ "ssns": {{step, "a"}, {step, "c"}, {next, "b"}, {step, ""}},
+ "sssn": {{step, "a"}, {step, "c"}, {step, "b"}, {next, ""}},
+ "ssss": {{step, "a"}, {step, "c"}, {step, "b"}, {step, ""}},
+}
+
+// root
+// / | \
+// / | ----
+// f d h --------
+// /\ / \ |
+// e a j b g
+// | / \ |
+// l n k icm
+// |
+// o
+// |
+// p
+func iterCrazy() *Iter {
+ l := newNode(hash, "l", empty)
+ e := newNode(hash, "e", []*node{l})
+
+ p := newNode(hash, "p", empty)
+ o := newNode(hash, "o", []*node{p})
+ n := newNode(hash, "n", []*node{o})
+ k := newNode(hash, "k", empty)
+ a := newNode(hash, "a", []*node{n, k})
+ f := newNode(hash, "f", []*node{e, a})
+
+ d := newNode(hash, "d", empty)
+
+ i := newNode(hash, "i", empty)
+ c := newNode(hash, "c", empty)
+ m := newNode(hash, "m", empty)
+ j := newNode(hash, "j", []*node{i, c, m})
+ b := newNode(hash, "b", empty)
+ g := newNode(hash, "g", empty)
+ h := newNode(hash, "h", []*node{j, b, g})
+
+ root := newNode(hash, "root", []*node{f, d, h})
+ return NewIter(root)
+}
+
+var (
+ n = next
+ s = step
+)
+
+var runsCrazy = map[string][]test{
+ "nn nn n": {{n, "d"}, {n, "f"}, {n, "h"}, {n, ""}, {n, ""}},
+ "nn nn s": {{n, "d"}, {n, "f"}, {n, "h"}, {n, ""}, {s, ""}},
+ "nn ns n": {{n, "d"}, {n, "f"}, {n, "h"}, {s, "b"}, {n, "g"}},
+ "nn ns s": {{n, "d"}, {n, "f"}, {n, "h"}, {s, "b"}, {s, "g"}},
+ "nn sn n": {{n, "d"}, {n, "f"}, {s, "a"}, {n, "e"}, {n, "h"}},
+ "nn sn s": {{n, "d"}, {n, "f"}, {s, "a"}, {n, "e"}, {s, "l"}},
+ "nn ss n": {{n, "d"}, {n, "f"}, {s, "a"}, {s, "k"}, {n, "n"}},
+ "nn ss s": {{n, "d"}, {n, "f"}, {s, "a"}, {s, "k"}, {s, "n"}},
+ "ns nn n": {{n, "d"}, {s, "f"}, {n, "h"}, {n, ""}, {n, ""}},
+ "ns nn s": {{n, "d"}, {s, "f"}, {n, "h"}, {n, ""}, {s, ""}},
+ "ns ns n": {{n, "d"}, {s, "f"}, {n, "h"}, {s, "b"}, {n, "g"}},
+ "ns ns s": {{n, "d"}, {s, "f"}, {n, "h"}, {s, "b"}, {s, "g"}},
+ "ns sn n": {{n, "d"}, {s, "f"}, {s, "a"}, {n, "e"}, {n, "h"}},
+
+ "ns ss ns ss": {
+ {n, "d"}, {s, "f"},
+ {s, "a"}, {s, "k"},
+ {n, "n"}, {s, "o"},
+ {s, "p"}, {s, "e"},
+ },
+
+ "ns ss ns sn": {
+ {n, "d"}, {s, "f"},
+ {s, "a"}, {s, "k"},
+ {n, "n"}, {s, "o"},
+ {s, "p"}, {n, "e"},
+ },
+
+ "nn ns ns ss nn": {
+ {n, "d"}, {n, "f"},
+ {n, "h"}, {s, "b"},
+ {n, "g"}, {s, "j"},
+ {s, "c"}, {s, "i"},
+ {n, "m"}, {n, ""},
+ },
+}
diff --git a/plumbing/difftree/internal/merkletrie/iter_test.go b/plumbing/difftree/internal/merkletrie/iter_test.go
new file mode 100644
index 0000000..65116e1
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/iter_test.go
@@ -0,0 +1,176 @@
+package merkletrie
+
+import . "gopkg.in/check.v1"
+
+type IterSuite struct{}
+
+var _ = Suite(&IterSuite{})
+
+// we don't care about hashes for iterating the tree, so
+// use this hash for every object
+var hash = []byte{}
+
+// leaves have no children, so use this empty list.
+var empty = []*node{}
+
+// a test is defined as an operation to run on an iterator and the key of
+// the node expected to be returned by the operation. Use "" as the
+// expected key when there are no more objects in the tree.
+type test struct {
+ operation int // next or step
+ expected string // key of the expected node, "" for nil node
+}
+
+// test.operation
+const (
+ next = iota
+ step
+)
+
+// goes over a list of tests, calling each operation on the iter and
+// checking that the obtained result is equal to the expected result
+func runTests(c *C, description string, iter *Iter, list []test) {
+ var obtained Noder
+ var ok bool
+ var comment CommentInterface
+
+ for i, t := range list {
+ comment = Commentf("description %q, operation #%d",
+ description, i+1)
+
+ switch t.operation {
+ case next:
+ obtained, ok = iter.Next()
+ case step:
+ obtained, ok = iter.Step()
+ default:
+ c.Fatalf("invalid operation %d", t.operation)
+ }
+
+ if t.expected == "" {
+ c.Assert(ok, Equals, false, comment)
+ c.Assert(obtained, IsNil, comment)
+ } else {
+ c.Assert(ok, Equals, true, comment)
+ c.Assert(obtained.Key(), Equals, t.expected, comment)
+ }
+ }
+}
+
+// a simple tree consisting of just a leaf
+func (s *IterSuite) TestLeaf(c *C) {
+ for description, tests := range runs0 {
+ runTests(c, description, iterLeaf(), tests)
+ }
+}
+
+// root
+// |
+// a
+func (s *IterSuite) TestOneChild(c *C) {
+ for description, tests := range runs1 {
+ runTests(c, description, iter1(), tests)
+ }
+}
+
+// root
+// / \
+// a b
+func (s *IterSuite) Test2HorizontalSorted(c *C) {
+ for description, tests := range runs2Horizontal {
+ runTests(c, description, iter2HorizontalSorted(), tests)
+ }
+}
+
+// root
+// / \
+// b a
+func (s *IterSuite) Test2HorizontalReverse(c *C) {
+ for description, tests := range runs2Horizontal {
+ runTests(c, description, iter2HorizontalReverse(), tests)
+ }
+}
+
+// root
+// |
+// a
+// |
+// b
+func (s *IterSuite) Test2VerticalSorted(c *C) {
+ for description, tests := range runs2VerticalSorted {
+ runTests(c, description, iter2VerticalSorted(), tests)
+ }
+}
+
+// root
+// |
+// b
+// |
+// a
+func (s *IterSuite) Test2VerticalReverse(c *C) {
+ for description, tests := range runs2VerticalReverse {
+ runTests(c, description, iter2VerticalReverse(), tests)
+ }
+}
+
+// root
+// /|\
+// c a b
+func (s *IterSuite) Test3Horizontal(c *C) {
+ for description, tests := range runs3Horizontal {
+ runTests(c, description, iter3Horizontal(), tests)
+ }
+}
+
+// root
+// |
+// b
+// |
+// c
+// |
+// a
+func (s *IterSuite) Test3Vertical(c *C) {
+ for description, tests := range runs3Vertical {
+ runTests(c, description, iter3Vertical(), tests)
+ }
+}
+
+// root
+// / \
+// c a
+// |
+// b
+func (s *IterSuite) Test3Mix1(c *C) {
+ for description, tests := range runs3Mix1 {
+ runTests(c, description, iter3Mix1(), tests)
+ }
+}
+
+// root
+// / \
+// b a
+// |
+// c
+func (s *IterSuite) Test3Mix2(c *C) {
+ for description, tests := range runs3Mix2 {
+ runTests(c, description, iter3Mix2(), tests)
+ }
+}
+
+// root
+// / | \
+// / | ----
+// f d h --------
+// /\ / \ |
+// e a j b g
+// | / \ |
+// l n k icm
+// |
+// o
+// |
+// p
+func (s *IterSuite) TestCrazy(c *C) {
+ for description, tests := range runsCrazy {
+ runTests(c, description, iterCrazy(), tests)
+ }
+}
diff --git a/plumbing/difftree/internal/merkletrie/node.go b/plumbing/difftree/internal/merkletrie/node.go
new file mode 100644
index 0000000..99be5b8
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/node.go
@@ -0,0 +1,65 @@
+package merkletrie
+
+import (
+ "sort"
+ "strings"
+)
+
+// A node is a Noder implementation for testing purposes: It is easier
+// to create test trees using nodes than using real git tree objects.
+type node struct {
+ hash []byte
+ key string
+ children []*node
+}
+
+// newNode returns a new Node with the given hash, key and children
+// (children can be specified in any order).
+func newNode(hash []byte, key string, children []*node) *node {
+ sort.Sort(reverseAlphabeticallyByKey(children))
+
+ return &node{
+ hash: hash,
+ key: key,
+ children: children,
+ }
+}
+
+// Hash returns the hash of the node.
+func (n *node) Hash() []byte {
+ return n.hash
+}
+
+// Key returns the key of the node.
+func (n *node) Key() string {
+ return n.key
+}
+
+// NumChildren returns the number of children.
+func (n *node) NumChildren() int {
+ return len(n.children)
+}
+
+// Children returns the node's children in reverse key alphabetical
+// order.
+func (n *node) Children() []Noder {
+ ret := make([]Noder, n.NumChildren())
+ for i := range n.children {
+ ret[i] = n.children[i]
+ }
+ return ret
+}
+
+type reverseAlphabeticallyByKey []*node
+
+func (a reverseAlphabeticallyByKey) Len() int {
+ return len(a)
+}
+
+func (a reverseAlphabeticallyByKey) Swap(i, j int) {
+ a[i], a[j] = a[j], a[i]
+}
+
+func (a reverseAlphabeticallyByKey) Less(i, j int) bool {
+ return strings.Compare(a[i].key, a[j].key) > 0
+}
diff --git a/plumbing/difftree/internal/merkletrie/node_test.go b/plumbing/difftree/internal/merkletrie/node_test.go
new file mode 100644
index 0000000..1622952
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/node_test.go
@@ -0,0 +1,68 @@
+package merkletrie
+
+import (
+ "testing"
+
+ . "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) { TestingT(t) }
+
+type NodeSuite struct{}
+
+var _ = Suite(&NodeSuite{})
+
+func (s *NodeSuite) TestHash(c *C) {
+ n := newNode([]byte("the_hash"), "the_key", []*node{})
+
+ expected := []byte("the_hash")
+ c.Assert(expected, DeepEquals, n.Hash())
+}
+
+func (s *NodeSuite) TestKey(c *C) {
+ n := newNode([]byte("the_hash"), "the_key", []*node{})
+
+ expected := "the_key"
+ c.Assert(expected, Equals, n.Key())
+}
+
+func (s *NodeSuite) TestNoChildren(c *C) {
+ n := newNode([]byte{}, "", []*node{})
+
+ expectedNumChildren := 0
+ c.Assert(n.NumChildren(), Equals, expectedNumChildren)
+
+ expectedChildren := []Noder{}
+ c.Assert(n.Children(), DeepEquals, expectedChildren)
+}
+
+func (s *NodeSuite) TestOneChild(c *C) {
+ child := newNode([]byte("child"), "child", []*node{})
+ parent := newNode([]byte("parent"), "parent", []*node{child})
+
+ expectedNumChildren := 1
+ c.Assert(parent.NumChildren(), Equals, expectedNumChildren)
+
+ expectedChildren := []Noder{Noder(child)}
+ c.Assert(parent.Children(), DeepEquals, expectedChildren)
+}
+
+func (s *NodeSuite) TestManyChildren(c *C) {
+ child0 := newNode([]byte("child0"), "child0", []*node{})
+ child1 := newNode([]byte("child1"), "child1", []*node{})
+ child2 := newNode([]byte("child2"), "child2", []*node{})
+ child3 := newNode([]byte("child3"), "child3", []*node{})
+ // children are added unsorted.
+ parent := newNode([]byte("parent"), "parent", []*node{child1, child3, child0, child2})
+
+ expectedNumChildren := 4
+ c.Assert(parent.NumChildren(), Equals, expectedNumChildren)
+
+ expectedChildren := []Noder{ // in reverse alphabetical order by key
+ Noder(child3),
+ Noder(child2),
+ Noder(child1),
+ Noder(child0),
+ }
+ c.Assert(parent.Children(), DeepEquals, expectedChildren)
+}
diff --git a/plumbing/difftree/internal/merkletrie/noder.go b/plumbing/difftree/internal/merkletrie/noder.go
new file mode 100644
index 0000000..3566657
--- /dev/null
+++ b/plumbing/difftree/internal/merkletrie/noder.go
@@ -0,0 +1,20 @@
+package merkletrie
+
+// The Noder interface is implemented by the elements of a Merkle Trie.
+type Noder interface {
+ // Hash returns the hash of the element.
+ Hash() []byte
+ // Key returns the key of the element.
+ Key() string
+ // Children returns the children of the element, sorted
+ // in reverse key alphabetical order.
+ Children() []Noder
+ // NumChildren returns the number of children this element has.
+ //
+ // This method is an optimization: the number of children is easily
+ // calculated as the length of the value returned by the Children
+ // method (above); yet, some implementations will be able to
+ // implement NumChildren in O(1) while Children is usually more
+ // complex.
+ NumChildren() int
+}
diff --git a/plumbing/errors.go b/plumbing/error.go
index a3ebed3..a3ebed3 100644
--- a/plumbing/errors.go
+++ b/plumbing/error.go
diff --git a/plumbing/format/packfile/decoder.go b/plumbing/format/packfile/decoder.go
index 4ab8eb0..59a2f8d 100644
--- a/plumbing/format/packfile/decoder.go
+++ b/plumbing/format/packfile/decoder.go
@@ -52,7 +52,7 @@ var (
// ObjectStorer was provided or not.
type Decoder struct {
s *Scanner
- o storer.ObjectStorer
+ o storer.EncodedObjectStorer
tx storer.Transaction
isDecoded bool
@@ -71,7 +71,7 @@ type Decoder struct {
//
// If the ObjectStorer implements storer.Transactioner, a transaction is created
// during the Decode execution, if something fails the Rollback is called
-func NewDecoder(s *Scanner, o storer.ObjectStorer) (*Decoder, error) {
+func NewDecoder(s *Scanner, o storer.EncodedObjectStorer) (*Decoder, error) {
if !canResolveDeltas(s, o) {
return nil, ErrResolveDeltasNotSupported
}
@@ -86,7 +86,7 @@ func NewDecoder(s *Scanner, o storer.ObjectStorer) (*Decoder, error) {
}, nil
}
-func canResolveDeltas(s *Scanner, o storer.ObjectStorer) bool {
+func canResolveDeltas(s *Scanner, o storer.EncodedObjectStorer) bool {
return s.IsSeekable || o != nil
}
@@ -140,7 +140,7 @@ func (d *Decoder) decodeObjectsWithObjectStorer(count int) error {
return err
}
- if _, err := d.o.SetObject(obj); err != nil {
+ if _, err := d.o.SetEncodedObject(obj); err != nil {
return err
}
}
@@ -157,7 +157,7 @@ func (d *Decoder) decodeObjectsWithObjectStorerTx(count int) error {
return err
}
- if _, err := d.tx.SetObject(obj); err != nil {
+ if _, err := d.tx.SetEncodedObject(obj); err != nil {
if rerr := d.tx.Rollback(); rerr != nil {
return ErrRollback.AddDetails(
"error: %s, during tx.Set error: %s", rerr, err,
@@ -175,7 +175,7 @@ func (d *Decoder) decodeObjectsWithObjectStorerTx(count int) error {
// DecodeObject reads the next object from the scanner and returns it. This
// method can be used in place of the Decode method, to work in an
// iterative way
-func (d *Decoder) DecodeObject() (plumbing.Object, error) {
+func (d *Decoder) DecodeObject() (plumbing.EncodedObject, error) {
h, err := d.s.NextObjectHeader()
if err != nil {
return nil, err
@@ -207,17 +207,17 @@ func (d *Decoder) DecodeObject() (plumbing.Object, error) {
return obj, nil
}
-func (d *Decoder) newObject() plumbing.Object {
+func (d *Decoder) newObject() plumbing.EncodedObject {
if d.o == nil {
return &plumbing.MemoryObject{}
}
- return d.o.NewObject()
+ return d.o.NewEncodedObject()
}
// DecodeObjectAt reads an object at the given location, if Decode wasn't called
// previously, the objects' offsets should be provided using the SetOffsets method
-func (d *Decoder) DecodeObjectAt(offset int64) (plumbing.Object, error) {
+func (d *Decoder) DecodeObjectAt(offset int64) (plumbing.EncodedObject, error) {
if !d.s.IsSeekable {
return nil, ErrNonSeekable
}
@@ -237,7 +237,7 @@ func (d *Decoder) DecodeObjectAt(offset int64) (plumbing.Object, error) {
return d.DecodeObject()
}
-func (d *Decoder) fillRegularObjectContent(obj plumbing.Object) (uint32, error) {
+func (d *Decoder) fillRegularObjectContent(obj plumbing.EncodedObject) (uint32, error) {
w, err := obj.Writer()
if err != nil {
return 0, err
@@ -247,7 +247,7 @@ func (d *Decoder) fillRegularObjectContent(obj plumbing.Object) (uint32, error)
return crc, err
}
-func (d *Decoder) fillREFDeltaObjectContent(obj plumbing.Object, ref plumbing.Hash) (uint32, error) {
+func (d *Decoder) fillREFDeltaObjectContent(obj plumbing.EncodedObject, ref plumbing.Hash) (uint32, error) {
buf := bytes.NewBuffer(nil)
_, crc, err := d.s.NextObject(buf)
if err != nil {
@@ -263,7 +263,7 @@ func (d *Decoder) fillREFDeltaObjectContent(obj plumbing.Object, ref plumbing.Ha
return crc, ApplyDelta(obj, base, buf.Bytes())
}
-func (d *Decoder) fillOFSDeltaObjectContent(obj plumbing.Object, offset int64) (uint32, error) {
+func (d *Decoder) fillOFSDeltaObjectContent(obj plumbing.EncodedObject, offset int64) (uint32, error) {
buf := bytes.NewBuffer(nil)
_, crc, err := d.s.NextObject(buf)
if err != nil {
@@ -288,7 +288,7 @@ func (d *Decoder) setCRC(h plumbing.Hash, crc uint32) {
d.crcs[h] = crc
}
-func (d *Decoder) recallByOffset(o int64) (plumbing.Object, error) {
+func (d *Decoder) recallByOffset(o int64) (plumbing.EncodedObject, error) {
if d.s.IsSeekable {
return d.DecodeObjectAt(o)
}
@@ -300,7 +300,7 @@ func (d *Decoder) recallByOffset(o int64) (plumbing.Object, error) {
return nil, plumbing.ErrObjectNotFound
}
-func (d *Decoder) recallByHash(h plumbing.Hash) (plumbing.Object, error) {
+func (d *Decoder) recallByHash(h plumbing.Hash) (plumbing.EncodedObject, error) {
if d.s.IsSeekable {
if o, ok := d.hashToOffset[h]; ok {
return d.DecodeObjectAt(o)
@@ -312,11 +312,11 @@ func (d *Decoder) recallByHash(h plumbing.Hash) (plumbing.Object, error) {
// recallByHashNonSeekable if we are in a transaction the objects are read from
// the transaction, if not are directly read from the ObjectStorer
-func (d *Decoder) recallByHashNonSeekable(h plumbing.Hash) (obj plumbing.Object, err error) {
+func (d *Decoder) recallByHashNonSeekable(h plumbing.Hash) (obj plumbing.EncodedObject, err error) {
if d.tx != nil {
- obj, err = d.tx.Object(plumbing.AnyObject, h)
+ obj, err = d.tx.EncodedObject(plumbing.AnyObject, h)
} else {
- obj, err = d.o.Object(plumbing.AnyObject, h)
+ obj, err = d.o.EncodedObject(plumbing.AnyObject, h)
}
if err != plumbing.ErrObjectNotFound {
diff --git a/plumbing/format/packfile/decoder_test.go b/plumbing/format/packfile/decoder_test.go
index 865cdae..fdf4c96 100644
--- a/plumbing/format/packfile/decoder_test.go
+++ b/plumbing/format/packfile/decoder_test.go
@@ -95,7 +95,7 @@ func (s *ReaderSuite) TestDecodeNoSeekableWithTxStorer(c *C) {
scanner := packfile.NewScanner(reader)
- var storage storer.ObjectStorer = memory.NewStorage()
+ var storage storer.EncodedObjectStorer = memory.NewStorage()
_, isTxStorer := storage.(storer.Transactioner)
c.Assert(isTxStorer, Equals, true)
@@ -119,7 +119,7 @@ func (s *ReaderSuite) TestDecodeNoSeekableWithoutTxStorer(c *C) {
scanner := packfile.NewScanner(reader)
- var storage storer.ObjectStorer
+ var storage storer.EncodedObjectStorer
storage, _ = filesystem.NewStorage(fs.New())
_, isTxStorer := storage.(storer.Transactioner)
c.Assert(isTxStorer, Equals, false)
@@ -236,18 +236,18 @@ func (s *ReaderSuite) TestSetOffsets(c *C) {
c.Assert(o[h], Equals, int64(42))
}
-func assertObjects(c *C, s storer.ObjectStorer, expects []string) {
+func assertObjects(c *C, s storer.EncodedObjectStorer, expects []string) {
- i, err := s.IterObjects(plumbing.AnyObject)
+ i, err := s.IterEncodedObjects(plumbing.AnyObject)
c.Assert(err, IsNil)
var count int
- err = i.ForEach(func(plumbing.Object) error { count++; return nil })
+ err = i.ForEach(func(plumbing.EncodedObject) error { count++; return nil })
c.Assert(err, IsNil)
c.Assert(count, Equals, len(expects))
for _, exp := range expects {
- obt, err := s.Object(plumbing.AnyObject, plumbing.NewHash(exp))
+ obt, err := s.EncodedObject(plumbing.AnyObject, plumbing.NewHash(exp))
c.Assert(err, IsNil)
c.Assert(obt.Hash().String(), Equals, exp)
}
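For reference, a minimal sketch of driving the decoder through the renamed storer interface. It is not part of this change; memory.NewStorage and IterEncodedObjects are taken from the tests above, while the exact NewDecoder constructor shape is an assumption.

package main

import (
	"fmt"
	"os"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/format/packfile"
	"gopkg.in/src-d/go-git.v4/storage/memory"
)

func main() {
	f, err := os.Open("objects.pack") // any packfile on disk
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// memory.NewStorage satisfies storer.EncodedObjectStorer, as the tests
	// above rely on.
	storage := memory.NewStorage()
	scanner := packfile.NewScanner(f)

	// Assumed constructor shape: NewDecoder(scanner, storer) (*Decoder, error).
	d, err := packfile.NewDecoder(scanner, storage)
	if err != nil {
		panic(err)
	}

	checksum, err := d.Decode() // decodes every object into storage
	if err != nil {
		panic(err)
	}
	fmt.Println("packfile checksum:", checksum)

	// Each decoded object is now available as a plumbing.EncodedObject.
	iter, err := storage.IterEncodedObjects(plumbing.AnyObject)
	if err != nil {
		panic(err)
	}
	iter.ForEach(func(o plumbing.EncodedObject) error {
		fmt.Println(o.Type(), o.Hash())
		return nil
	})
}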
diff --git a/plumbing/format/packfile/diff_delta.go b/plumbing/format/packfile/diff_delta.go
index fb05a79..bc4fafa 100644
--- a/plumbing/format/packfile/diff_delta.go
+++ b/plumbing/format/packfile/diff_delta.go
@@ -17,17 +17,17 @@ const (
// GetOFSDelta returns an offset delta that describes how to transform the
// base object into the target object
-func GetOFSDelta(base, target plumbing.Object) (plumbing.Object, error) {
+func GetOFSDelta(base, target plumbing.EncodedObject) (plumbing.EncodedObject, error) {
return getDelta(base, target, plumbing.OFSDeltaObject)
}
// GetRefDelta returns a reference delta that describes how to transform the
// base object into the target object
-func GetRefDelta(base, target plumbing.Object) (plumbing.Object, error) {
+func GetRefDelta(base, target plumbing.EncodedObject) (plumbing.EncodedObject, error) {
return getDelta(base, target, plumbing.REFDeltaObject)
}
-func getDelta(base, target plumbing.Object, t plumbing.ObjectType) (plumbing.Object, error) {
+func getDelta(base, target plumbing.EncodedObject, t plumbing.ObjectType) (plumbing.EncodedObject, error) {
if t != plumbing.OFSDeltaObject && t != plumbing.REFDeltaObject {
return nil, fmt.Errorf("Type not supported: %v", t)
}
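For reference, a hedged sketch of how the renamed delta helpers fit together with ApplyDelta (declared further down in patch_delta.go). It is not part of this change; the objects are built by hand with plumbing.MemoryObject, and memObject is a hypothetical helper.

package main

import (
	"fmt"
	"io/ioutil"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/format/packfile"
)

// memObject builds an in-memory EncodedObject for the sketch.
func memObject(t plumbing.ObjectType, content []byte) plumbing.EncodedObject {
	o := &plumbing.MemoryObject{}
	o.SetType(t)
	o.SetSize(int64(len(content)))
	o.Write(content)
	return o
}

func main() {
	base := memObject(plumbing.BlobObject, []byte("hello world\n"))
	target := memObject(plumbing.BlobObject, []byte("hello, brave new world\n"))

	// The delta itself comes back as another plumbing.EncodedObject.
	delta, err := packfile.GetOFSDelta(base, target)
	if err != nil {
		panic(err)
	}

	r, err := delta.Reader()
	if err != nil {
		panic(err)
	}
	deltaData, err := ioutil.ReadAll(r)
	if err != nil {
		panic(err)
	}

	// Rebuild the target from base + delta. Setting the type here is a
	// precaution; this diff does not show whether ApplyDelta does it.
	restored := &plumbing.MemoryObject{}
	restored.SetType(base.Type())
	if err := packfile.ApplyDelta(restored, base, deltaData); err != nil {
		panic(err)
	}

	rr, _ := restored.Reader()
	out, _ := ioutil.ReadAll(rr)
	fmt.Printf("%q\n", out) // expected to print the target content
}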
diff --git a/plumbing/format/packfile/encoder.go b/plumbing/format/packfile/encoder.go
index eb1c532..847e9e1 100644
--- a/plumbing/format/packfile/encoder.go
+++ b/plumbing/format/packfile/encoder.go
@@ -14,7 +14,7 @@ import (
// Encoder gets the data from the storage and writes it into the writer in PACK
// format
type Encoder struct {
- storage storer.ObjectStorer
+ storage storer.EncodedObjectStorer
w *offsetWriter
zw *zlib.Writer
hasher plumbing.Hasher
@@ -23,7 +23,7 @@ type Encoder struct {
// NewEncoder creates a new packfile encoder using a specific Writer and
// ObjectStorer
-func NewEncoder(w io.Writer, s storer.ObjectStorer) *Encoder {
+func NewEncoder(w io.Writer, s storer.EncodedObjectStorer) *Encoder {
h := plumbing.Hasher{
Hash: sha1.New(),
}
@@ -44,7 +44,7 @@ func NewEncoder(w io.Writer, s storer.ObjectStorer) *Encoder {
func (e *Encoder) Encode(hashes []plumbing.Hash) (plumbing.Hash, error) {
var objects []*ObjectToPack
for _, h := range hashes {
- o, err := e.storage.Object(plumbing.AnyObject, h)
+ o, err := e.storage.EncodedObject(plumbing.AnyObject, h)
if err != nil {
return plumbing.ZeroHash, err
}
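For reference, a minimal sketch of packing two in-memory objects through the renamed interface. It is not part of this change and only uses calls visible in this diff and in the tests that follow (memory.NewStorage, NewEncodedObject, SetEncodedObject, NewEncoder, Encode).

package main

import (
	"bytes"
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/format/packfile"
	"gopkg.in/src-d/go-git.v4/storage/memory"
)

func main() {
	storage := memory.NewStorage() // satisfies storer.EncodedObjectStorer

	var hashes []plumbing.Hash
	for _, content := range []string{"first blob\n", "second blob\n"} {
		o := storage.NewEncodedObject()
		o.SetType(plumbing.BlobObject)
		o.SetSize(int64(len(content)))

		w, err := o.Writer()
		if err != nil {
			panic(err)
		}
		w.Write([]byte(content))
		w.Close()

		h, err := storage.SetEncodedObject(o)
		if err != nil {
			panic(err)
		}
		hashes = append(hashes, h)
	}

	var buf bytes.Buffer
	enc := packfile.NewEncoder(&buf, storage)

	packHash, err := enc.Encode(hashes)
	if err != nil {
		panic(err)
	}
	fmt.Printf("wrote %d-byte pack %s\n", buf.Len(), packHash)
}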
diff --git a/plumbing/format/packfile/encoder_test.go b/plumbing/format/packfile/encoder_test.go
index b07e2f4..1a94d16 100644
--- a/plumbing/format/packfile/encoder_test.go
+++ b/plumbing/format/packfile/encoder_test.go
@@ -44,7 +44,7 @@ func (s *EncoderSuite) TestCorrectPackWithOneEmptyObject(c *C) {
o := &plumbing.MemoryObject{}
o.SetType(plumbing.CommitObject)
o.SetSize(0)
- _, err := s.store.SetObject(o)
+ _, err := s.store.SetEncodedObject(o)
c.Assert(err, IsNil)
hash, err := s.enc.Encode([]plumbing.Hash{o.Hash()})
@@ -69,10 +69,10 @@ func (s *EncoderSuite) TestCorrectPackWithOneEmptyObject(c *C) {
}
func (s *EncoderSuite) TestMaxObjectSize(c *C) {
- o := s.store.NewObject()
+ o := s.store.NewEncodedObject()
o.SetSize(9223372036854775807)
o.SetType(plumbing.CommitObject)
- _, err := s.store.SetObject(o)
+ _, err := s.store.SetEncodedObject(o)
c.Assert(err, IsNil)
hash, err := s.enc.Encode([]plumbing.Hash{o.Hash()})
c.Assert(err, IsNil)
@@ -98,14 +98,14 @@ func (s *EncoderSuite) TestDecodeEncodeDecode(c *C) {
c.Assert(err, IsNil)
c.Assert(ch, Equals, f.PackfileHash)
- objIter, err := d.o.IterObjects(plumbing.AnyObject)
+ objIter, err := d.o.IterEncodedObjects(plumbing.AnyObject)
c.Assert(err, IsNil)
- objects := []plumbing.Object{}
+ objects := []plumbing.EncodedObject{}
hashes := []plumbing.Hash{}
- err = objIter.ForEach(func(o plumbing.Object) error {
+ err = objIter.ForEach(func(o plumbing.EncodedObject) error {
objects = append(objects, o)
- hash, err := s.store.SetObject(o)
+ hash, err := s.store.SetEncodedObject(o)
c.Assert(err, IsNil)
hashes = append(hashes, hash)
@@ -124,10 +124,10 @@ func (s *EncoderSuite) TestDecodeEncodeDecode(c *C) {
_, err = d.Decode()
c.Assert(err, IsNil)
- objIter, err = d.o.IterObjects(plumbing.AnyObject)
+ objIter, err = d.o.IterEncodedObjects(plumbing.AnyObject)
c.Assert(err, IsNil)
- obtainedObjects := []plumbing.Object{}
- err = objIter.ForEach(func(o plumbing.Object) error {
+ obtainedObjects := []plumbing.EncodedObject{}
+ err = objIter.ForEach(func(o plumbing.EncodedObject) error {
obtainedObjects = append(obtainedObjects, o)
return nil
@@ -187,11 +187,11 @@ func (s *EncoderSuite) simpleDeltaTest(c *C, t plumbing.ObjectType) {
_, err = d.Decode()
c.Assert(err, IsNil)
- decSrc, err := storage.Object(srcObject.Type(), srcObject.Hash())
+ decSrc, err := storage.EncodedObject(srcObject.Type(), srcObject.Hash())
c.Assert(err, IsNil)
c.Assert(decSrc, DeepEquals, srcObject)
- decTarget, err := storage.Object(targetObject.Type(), targetObject.Hash())
+ decTarget, err := storage.EncodedObject(targetObject.Type(), targetObject.Hash())
c.Assert(err, IsNil)
c.Assert(decTarget, DeepEquals, targetObject)
}
@@ -226,20 +226,20 @@ func (s *EncoderSuite) deltaOverDeltaTest(c *C, t plumbing.ObjectType) {
_, err = d.Decode()
c.Assert(err, IsNil)
- decSrc, err := storage.Object(srcObject.Type(), srcObject.Hash())
+ decSrc, err := storage.EncodedObject(srcObject.Type(), srcObject.Hash())
c.Assert(err, IsNil)
c.Assert(decSrc, DeepEquals, srcObject)
- decTarget, err := storage.Object(targetObject.Type(), targetObject.Hash())
+ decTarget, err := storage.EncodedObject(targetObject.Type(), targetObject.Hash())
c.Assert(err, IsNil)
c.Assert(decTarget, DeepEquals, targetObject)
- decOtherTarget, err := storage.Object(otherTargetObject.Type(), otherTargetObject.Hash())
+ decOtherTarget, err := storage.EncodedObject(otherTargetObject.Type(), otherTargetObject.Hash())
c.Assert(err, IsNil)
c.Assert(decOtherTarget, DeepEquals, otherTargetObject)
}
-func delta(base, target plumbing.Object, t plumbing.ObjectType) (plumbing.Object, error) {
+func delta(base, target plumbing.EncodedObject, t plumbing.ObjectType) (plumbing.EncodedObject, error) {
switch t {
case plumbing.OFSDeltaObject:
return GetOFSDelta(base, target)
@@ -250,7 +250,7 @@ func delta(base, target plumbing.Object, t plumbing.ObjectType) (plumbing.Object
}
}
-func newObject(t plumbing.ObjectType, cont []byte) plumbing.Object {
+func newObject(t plumbing.ObjectType, cont []byte) plumbing.EncodedObject {
o := plumbing.MemoryObject{}
o.SetType(t)
o.SetSize(int64(len(cont)))
diff --git a/plumbing/format/packfile/object_pack.go b/plumbing/format/packfile/object_pack.go
index dc5a3c7..dfe9bb2 100644
--- a/plumbing/format/packfile/object_pack.go
+++ b/plumbing/format/packfile/object_pack.go
@@ -6,20 +6,21 @@ import "gopkg.in/src-d/go-git.v4/plumbing"
// pack file.
type ObjectToPack struct {
// The main object to pack, it could be any object, including deltas
- Object plumbing.Object
+ Object plumbing.EncodedObject
// Base is the object that a delta is based on (it could also be another delta).
// If the main object is not a delta, Base will be nil
Base *ObjectToPack
// Original is the object that we can generate by applying the delta to
- // Base, or the same object as Object in the case of a non-delta object.
- Original plumbing.Object
+ // Base, or the same object as EncodedObject in the case of a non-delta
+ // object.
+ Original plumbing.EncodedObject
// Depth is the number of deltas needed to resolve to obtain Original
// (delta based on delta based on ...)
Depth int
}
// newObjectToPack creates a correct ObjectToPack based on a non-delta object
-func newObjectToPack(o plumbing.Object) *ObjectToPack {
+func newObjectToPack(o plumbing.EncodedObject) *ObjectToPack {
return &ObjectToPack{
Object: o,
Original: o,
@@ -29,7 +30,7 @@ func newObjectToPack(o plumbing.Object) *ObjectToPack {
// newDeltaObjectToPack creates a correct ObjectToPack for a delta object, based on
// its base (which could be another delta), the delta target (here called original),
// and the delta Object itself
-func newDeltaObjectToPack(base *ObjectToPack, original, delta plumbing.Object) *ObjectToPack {
+func newDeltaObjectToPack(base *ObjectToPack, original, delta plumbing.EncodedObject) *ObjectToPack {
return &ObjectToPack{
Object: delta,
Base: base,
diff --git a/plumbing/format/packfile/patch_delta.go b/plumbing/format/packfile/patch_delta.go
index 2493a39..36219bb 100644
--- a/plumbing/format/packfile/patch_delta.go
+++ b/plumbing/format/packfile/patch_delta.go
@@ -14,7 +14,7 @@ import (
const deltaSizeMin = 4
// ApplyDelta writes to target the result of applying the modification deltas in delta to base.
-func ApplyDelta(target, base plumbing.Object, delta []byte) error {
+func ApplyDelta(target, base plumbing.EncodedObject, delta []byte) error {
r, err := base.Reader()
if err != nil {
return err
diff --git a/plumbing/object.go b/plumbing/object.go
index 23abd4f..3304da2 100644
--- a/plumbing/object.go
+++ b/plumbing/object.go
@@ -13,7 +13,7 @@ var (
)
// Object is a generic representation of any git object
-type Object interface {
+type EncodedObject interface {
Hash() Hash
Type() ObjectType
SetType(ObjectType)
diff --git a/plumbing/object/blob.go b/plumbing/object/blob.go
new file mode 100644
index 0000000..b0cac41
--- /dev/null
+++ b/plumbing/object/blob.go
@@ -0,0 +1,135 @@
+package object
+
+import (
+ "io"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/utils/ioutil"
+)
+
+// Blob is used to store file data - it is generally a file.
+type Blob struct {
+ Hash plumbing.Hash
+ Size int64
+
+ obj plumbing.EncodedObject
+}
+
+// GetBlob gets a blob from an object storer and decodes it.
+func GetBlob(s storer.EncodedObjectStorer, h plumbing.Hash) (*Blob, error) {
+ o, err := s.EncodedObject(plumbing.BlobObject, h)
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeBlob(o)
+}
+
+func DecodeBlob(o plumbing.EncodedObject) (*Blob, error) {
+ b := &Blob{}
+ if err := b.Decode(o); err != nil {
+ return nil, err
+ }
+
+ return b, nil
+}
+
+// ID returns the object ID of the blob. The returned value will always match
+// the current value of Blob.Hash.
+//
+// ID is present to fulfill the Object interface.
+func (b *Blob) ID() plumbing.Hash {
+ return b.Hash
+}
+
+// Type returns the type of object. It always returns plumbing.BlobObject.
+//
+// Type is present to fulfill the Object interface.
+func (b *Blob) Type() plumbing.ObjectType {
+ return plumbing.BlobObject
+}
+
+// Decode transforms a plumbing.EncodedObject into a Blob struct.
+func (b *Blob) Decode(o plumbing.EncodedObject) error {
+ if o.Type() != plumbing.BlobObject {
+ return ErrUnsupportedObject
+ }
+
+ b.Hash = o.Hash()
+ b.Size = o.Size()
+ b.obj = o
+
+ return nil
+}
+
+// Encode transforms a Blob into a plumbing.EncodedObject.
+func (b *Blob) Encode(o plumbing.EncodedObject) error {
+ w, err := o.Writer()
+ if err != nil {
+ return err
+ }
+ defer ioutil.CheckClose(w, &err)
+ r, err := b.Reader()
+ if err != nil {
+ return err
+ }
+ defer ioutil.CheckClose(r, &err)
+ _, err = io.Copy(w, r)
+ o.SetType(plumbing.BlobObject)
+ return err
+}
+
+// Reader returns a reader allowing access to the content of the blob.
+func (b *Blob) Reader() (io.ReadCloser, error) {
+ return b.obj.Reader()
+}
+
+// BlobIter provides an iterator for a set of blobs.
+type BlobIter struct {
+ storer.EncodedObjectIter
+ s storer.EncodedObjectStorer
+}
+
+// NewBlobIter returns a BlobIter for the given object storer and underlying
+// object iterator.
+//
+// The returned BlobIter will automatically skip over non-blob objects.
+func NewBlobIter(s storer.EncodedObjectStorer, iter storer.EncodedObjectIter) *BlobIter {
+ return &BlobIter{iter, s}
+}
+
+// Next moves the iterator to the next blob and returns a pointer to it. If it
+// has reached the end of the set it will return io.EOF.
+func (iter *BlobIter) Next() (*Blob, error) {
+ for {
+ obj, err := iter.EncodedObjectIter.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ if obj.Type() != plumbing.BlobObject {
+ continue
+ }
+
+ return DecodeBlob(obj)
+ }
+}
+
+// ForEach calls the cb function for each blob contained in this iter until
+// an error happens or the end of the iter is reached. If ErrStop is sent,
+// the iteration is stopped but no error is returned. The iterator is closed.
+func (iter *BlobIter) ForEach(cb func(*Blob) error) error {
+ return iter.EncodedObjectIter.ForEach(func(obj plumbing.EncodedObject) error {
+ if obj.Type() != plumbing.BlobObject {
+ return nil
+ }
+
+ b, err := DecodeBlob(obj)
+ if err != nil {
+ return err
+ }
+
+ return cb(b)
+ })
+}
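For reference, a hedged sketch of the new GetBlob entry point added above. printBlob is a hypothetical helper, not part of this commit; the storer and hash are whatever the caller already has.

package example

import (
	"fmt"
	"io/ioutil"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
)

// printBlob loads a blob by hash from any EncodedObjectStorer and dumps its
// content. It is a hypothetical helper, not part of this commit.
func printBlob(s storer.EncodedObjectStorer, h plumbing.Hash) error {
	blob, err := object.GetBlob(s, h)
	if err != nil {
		return err
	}
	fmt.Printf("blob %s (%d bytes)\n", blob.Hash, blob.Size)

	r, err := blob.Reader()
	if err != nil {
		return err
	}
	defer r.Close()

	data, err := ioutil.ReadAll(r)
	if err != nil {
		return err
	}
	fmt.Print(string(data))
	return nil
}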
diff --git a/plumbing/object/blob_test.go b/plumbing/object/blob_test.go
new file mode 100644
index 0000000..5ed9de0
--- /dev/null
+++ b/plumbing/object/blob_test.go
@@ -0,0 +1,96 @@
+package object
+
+import (
+ "io"
+ "io/ioutil"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+
+ . "gopkg.in/check.v1"
+)
+
+type BlobsSuite struct {
+ BaseObjectsSuite
+}
+
+var _ = Suite(&BlobsSuite{})
+
+func (s *BlobsSuite) TestBlobHash(c *C) {
+ o := &plumbing.MemoryObject{}
+ o.SetType(plumbing.BlobObject)
+ o.SetSize(3)
+
+ writer, err := o.Writer()
+ c.Assert(err, IsNil)
+ defer func() { c.Assert(writer.Close(), IsNil) }()
+
+ writer.Write([]byte{'F', 'O', 'O'})
+
+ blob := &Blob{}
+ c.Assert(blob.Decode(o), IsNil)
+
+ c.Assert(blob.Size, Equals, int64(3))
+ c.Assert(blob.Hash.String(), Equals, "d96c7efbfec2814ae0301ad054dc8d9fc416c9b5")
+
+ reader, err := blob.Reader()
+ c.Assert(err, IsNil)
+ defer func() { c.Assert(reader.Close(), IsNil) }()
+
+ data, err := ioutil.ReadAll(reader)
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, "FOO")
+}
+
+func (s *BlobsSuite) TestBlobDecodeEncodeIdempotent(c *C) {
+ var objects []*plumbing.MemoryObject
+ for _, str := range []string{"foo", "foo\n"} {
+ obj := &plumbing.MemoryObject{}
+ obj.Write([]byte(str))
+ obj.SetType(plumbing.BlobObject)
+ obj.Hash()
+ objects = append(objects, obj)
+ }
+ for _, object := range objects {
+ blob := &Blob{}
+ err := blob.Decode(object)
+ c.Assert(err, IsNil)
+ newObject := &plumbing.MemoryObject{}
+ err = blob.Encode(newObject)
+ c.Assert(err, IsNil)
+ newObject.Hash() // Ensure Hash is pre-computed before deep comparison
+ c.Assert(newObject, DeepEquals, object)
+ }
+}
+
+func (s *BlobsSuite) TestBlobIter(c *C) {
+ encIter, err := s.Storer.IterEncodedObjects(plumbing.BlobObject)
+ c.Assert(err, IsNil)
+ iter := NewBlobIter(s.Storer, encIter)
+
+ blobs := []*Blob{}
+ iter.ForEach(func(b *Blob) error {
+ blobs = append(blobs, b)
+ return nil
+ })
+
+ c.Assert(len(blobs) > 0, Equals, true)
+ iter.Close()
+
+ encIter, err = s.Storer.IterEncodedObjects(plumbing.BlobObject)
+ c.Assert(err, IsNil)
+ iter = NewBlobIter(s.Storer, encIter)
+
+ i := 0
+ for {
+ b, err := iter.Next()
+ if err == io.EOF {
+ break
+ }
+
+ c.Assert(err, IsNil)
+ c.Assert(b, DeepEquals, blobs[i])
+ i += 1
+ }
+
+ iter.Close()
+}
diff --git a/plumbing/object/commit.go b/plumbing/object/commit.go
new file mode 100644
index 0000000..f0ce6e8
--- /dev/null
+++ b/plumbing/object/commit.go
@@ -0,0 +1,293 @@
+package object
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "sort"
+ "strings"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/utils/ioutil"
+)
+
+// Hash represents the hash of an object
+type Hash plumbing.Hash
+
+// Commit points to a single tree, marking it as what the project looked like
+// at a certain point in time. It contains meta-information about that point
+// in time, such as a timestamp, the author of the changes since the last
+// commit, a pointer to the previous commit(s), etc.
+// http://schacon.github.io/gitbook/1_the_git_object_model.html
+type Commit struct {
+ Hash plumbing.Hash
+ Author Signature
+ Committer Signature
+ Message string
+
+ tree plumbing.Hash
+ parents []plumbing.Hash
+ s storer.EncodedObjectStorer
+}
+
+// GetCommit gets a commit from an object storer and decodes it.
+func GetCommit(s storer.EncodedObjectStorer, h plumbing.Hash) (*Commit, error) {
+ o, err := s.EncodedObject(plumbing.CommitObject, h)
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeCommit(s, o)
+}
+
+// DecodeCommit decodes an encoded object into a *Commit and associates it to
+// the given object storer.
+func DecodeCommit(s storer.EncodedObjectStorer, o plumbing.EncodedObject) (*Commit, error) {
+ c := &Commit{s: s}
+ if err := c.Decode(o); err != nil {
+ return nil, err
+ }
+
+ return c, nil
+}
+
+// Tree returns the Tree from the commit
+func (c *Commit) Tree() (*Tree, error) {
+ return GetTree(c.s, c.tree)
+}
+
+// Parents returns a CommitIter for the parent Commits
+func (c *Commit) Parents() *CommitIter {
+ return NewCommitIter(c.s,
+ storer.NewEncodedObjectLookupIter(c.s, plumbing.CommitObject, c.parents),
+ )
+}
+
+// NumParents returns the number of parents in a commit.
+func (c *Commit) NumParents() int {
+ return len(c.parents)
+}
+
+// File returns the file with the specified "path" in the commit and a
+// nil error if the file exists. If the file does not exist, it returns
+// a nil file and the ErrFileNotFound error.
+func (c *Commit) File(path string) (*File, error) {
+ tree, err := c.Tree()
+ if err != nil {
+ return nil, err
+ }
+
+ return tree.File(path)
+}
+
+// Files returns a FileIter that iterates over the files in the commit tree
+func (c *Commit) Files() (*FileIter, error) {
+ tree, err := c.Tree()
+ if err != nil {
+ return nil, err
+ }
+
+ return tree.Files(), nil
+}
+
+// ID returns the object ID of the commit. The returned value will always match
+// the current value of Commit.Hash.
+//
+// ID is present to fulfill the Object interface.
+func (c *Commit) ID() plumbing.Hash {
+ return c.Hash
+}
+
+// Type returns the type of object. It always returns plumbing.CommitObject.
+//
+// Type is present to fulfill the Object interface.
+func (c *Commit) Type() plumbing.ObjectType {
+ return plumbing.CommitObject
+}
+
+// Decode transforms a plumbing.EncodedObject into a Commit struct.
+func (c *Commit) Decode(o plumbing.EncodedObject) (err error) {
+ if o.Type() != plumbing.CommitObject {
+ return ErrUnsupportedObject
+ }
+
+ c.Hash = o.Hash()
+
+ reader, err := o.Reader()
+ if err != nil {
+ return err
+ }
+ defer ioutil.CheckClose(reader, &err)
+
+ r := bufio.NewReader(reader)
+
+ var message bool
+ for {
+ line, err := r.ReadSlice('\n')
+ if err != nil && err != io.EOF {
+ return err
+ }
+
+ if !message {
+ line = bytes.TrimSpace(line)
+ if len(line) == 0 {
+ message = true
+ continue
+ }
+
+ split := bytes.SplitN(line, []byte{' '}, 2)
+ switch string(split[0]) {
+ case "tree":
+ c.tree = plumbing.NewHash(string(split[1]))
+ case "parent":
+ c.parents = append(c.parents, plumbing.NewHash(string(split[1])))
+ case "author":
+ c.Author.Decode(split[1])
+ case "committer":
+ c.Committer.Decode(split[1])
+ }
+ } else {
+ c.Message += string(line)
+ }
+
+ if err == io.EOF {
+ return nil
+ }
+ }
+}
+
+// History returns a slice with the previous commits in the history of this commit
+func (c *Commit) History() ([]*Commit, error) {
+ var commits []*Commit
+ err := WalkCommitHistory(c, func(commit *Commit) error {
+ commits = append(commits, commit)
+ return nil
+ })
+
+ ReverseSortCommits(commits)
+ return commits, err
+}
+
+// Encode transforms a Commit into a plumbing.EncodedObject.
+func (b *Commit) Encode(o plumbing.EncodedObject) error {
+ o.SetType(plumbing.CommitObject)
+ w, err := o.Writer()
+ if err != nil {
+ return err
+ }
+ defer ioutil.CheckClose(w, &err)
+ if _, err = fmt.Fprintf(w, "tree %s\n", b.tree.String()); err != nil {
+ return err
+ }
+ for _, parent := range b.parents {
+ if _, err = fmt.Fprintf(w, "parent %s\n", parent.String()); err != nil {
+ return err
+ }
+ }
+ if _, err = fmt.Fprint(w, "author "); err != nil {
+ return err
+ }
+ if err = b.Author.Encode(w); err != nil {
+ return err
+ }
+ if _, err = fmt.Fprint(w, "\ncommitter "); err != nil {
+ return err
+ }
+ if err = b.Committer.Encode(w); err != nil {
+ return err
+ }
+ if _, err = fmt.Fprintf(w, "\n\n%s", b.Message); err != nil {
+ return err
+ }
+ return err
+}
+
+func (c *Commit) String() string {
+ return fmt.Sprintf(
+ "%s %s\nAuthor: %s\nDate: %s\n\n%s\n",
+ plumbing.CommitObject, c.Hash, c.Author.String(),
+ c.Author.When.Format(DateFormat), indent(c.Message),
+ )
+}
+
+func indent(t string) string {
+ var output []string
+ for _, line := range strings.Split(t, "\n") {
+ if len(line) != 0 {
+ line = " " + line
+ }
+
+ output = append(output, line)
+ }
+
+ return strings.Join(output, "\n")
+}
+
+// CommitIter provides an iterator for a set of commits.
+type CommitIter struct {
+ storer.EncodedObjectIter
+ s storer.EncodedObjectStorer
+}
+
+// NewCommitIter returns a CommitIter for the given object storer and underlying
+// object iterator.
+//
+// The returned CommitIter will automatically skip over non-commit objects.
+func NewCommitIter(s storer.EncodedObjectStorer, iter storer.EncodedObjectIter) *CommitIter {
+ return &CommitIter{iter, s}
+}
+
+// Next moves the iterator to the next commit and returns a pointer to it. If it
+// has reached the end of the set it will return io.EOF.
+func (iter *CommitIter) Next() (*Commit, error) {
+ obj, err := iter.EncodedObjectIter.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeCommit(iter.s, obj)
+}
+
+// ForEach calls the cb function for each commit contained in this iter until
+// an error happens or the end of the iter is reached. If ErrStop is sent,
+// the iteration is stopped but no error is returned. The iterator is closed.
+func (iter *CommitIter) ForEach(cb func(*Commit) error) error {
+ return iter.EncodedObjectIter.ForEach(func(obj plumbing.EncodedObject) error {
+ c, err := DecodeCommit(iter.s, obj)
+ if err != nil {
+ return err
+ }
+
+ return cb(c)
+ })
+}
+
+type commitSorterer struct {
+ l []*Commit
+}
+
+func (s commitSorterer) Len() int {
+ return len(s.l)
+}
+
+func (s commitSorterer) Less(i, j int) bool {
+ return s.l[i].Committer.When.Before(s.l[j].Committer.When)
+}
+
+func (s commitSorterer) Swap(i, j int) {
+ s.l[i], s.l[j] = s.l[j], s.l[i]
+}
+
+// SortCommits sorts a commit list by commit date, from older to newer.
+func SortCommits(l []*Commit) {
+ s := &commitSorterer{l}
+ sort.Sort(s)
+}
+
+// ReverseSortCommits sorts a commit list by commit date, from newer to older.
+func ReverseSortCommits(l []*Commit) {
+ s := &commitSorterer{l}
+ sort.Sort(sort.Reverse(s))
+}
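For reference, a hedged sketch of the commit helpers added above. describeCommit is a hypothetical helper, not part of this commit; it assumes a populated storer and a valid commit hash.

package example

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
)

// describeCommit loads a commit and walks its parents and files.
func describeCommit(s storer.EncodedObjectStorer, h plumbing.Hash) error {
	commit, err := object.GetCommit(s, h)
	if err != nil {
		return err
	}

	fmt.Println(commit) // uses Commit.String(), shown above

	// Walk the direct parents through the CommitIter returned by Parents.
	err = commit.Parents().ForEach(func(p *object.Commit) error {
		fmt.Println("parent:", p.Hash)
		return nil
	})
	if err != nil {
		return err
	}

	// List the files reachable from the commit tree.
	files, err := commit.Files()
	if err != nil {
		return err
	}
	return files.ForEach(func(f *object.File) error {
		fmt.Println("file:", f.Name)
		return nil
	})
}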
diff --git a/plumbing/object/commit_test.go b/plumbing/object/commit_test.go
new file mode 100644
index 0000000..abf92dd
--- /dev/null
+++ b/plumbing/object/commit_test.go
@@ -0,0 +1,169 @@
+package object
+
+import (
+ "io"
+ "time"
+
+ "gopkg.in/src-d/go-git.v4/fixtures"
+ "gopkg.in/src-d/go-git.v4/plumbing"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/src-d/go-git.v4/storage/filesystem"
+)
+
+type SuiteCommit struct {
+ BaseObjectsSuite
+ Commit *Commit
+}
+
+var _ = Suite(&SuiteCommit{})
+
+func (s *SuiteCommit) SetUpSuite(c *C) {
+ s.BaseObjectsSuite.SetUpSuite(c)
+
+ hash := plumbing.NewHash("1669dce138d9b841a518c64b10914d88f5e488ea")
+
+ s.Commit = s.commit(c, hash)
+}
+
+func (s *SuiteCommit) TestDecodeNonCommit(c *C) {
+ hash := plumbing.NewHash("9a48f23120e880dfbe41f7c9b7b708e9ee62a492")
+ blob, err := s.Storer.EncodedObject(plumbing.AnyObject, hash)
+ c.Assert(err, IsNil)
+
+ commit := &Commit{}
+ err = commit.Decode(blob)
+ c.Assert(err, Equals, ErrUnsupportedObject)
+}
+
+func (s *SuiteCommit) TestType(c *C) {
+ c.Assert(s.Commit.Type(), Equals, plumbing.CommitObject)
+}
+
+func (s *SuiteCommit) TestTree(c *C) {
+ tree, err := s.Commit.Tree()
+ c.Assert(err, IsNil)
+ c.Assert(tree.ID().String(), Equals, "eba74343e2f15d62adedfd8c883ee0262b5c8021")
+}
+
+func (s *SuiteCommit) TestParents(c *C) {
+ expected := []string{
+ "35e85108805c84807bc66a02d91535e1e24b38b9",
+ "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69",
+ }
+
+ var output []string
+ i := s.Commit.Parents()
+ err := i.ForEach(func(commit *Commit) error {
+ output = append(output, commit.ID().String())
+ return nil
+ })
+
+ c.Assert(err, IsNil)
+ c.Assert(output, DeepEquals, expected)
+}
+
+func (s *SuiteCommit) TestCommitEncodeDecodeIdempotent(c *C) {
+ ts, err := time.Parse(time.RFC3339, "2006-01-02T15:04:05-07:00")
+ c.Assert(err, IsNil)
+ commits := []*Commit{
+ {
+ Author: Signature{Name: "Foo", Email: "foo@example.local", When: ts},
+ Committer: Signature{Name: "Bar", Email: "bar@example.local", When: ts},
+ Message: "Message\n\nFoo\nBar\nWith trailing blank lines\n\n",
+ tree: plumbing.NewHash("f000000000000000000000000000000000000001"),
+ parents: []plumbing.Hash{plumbing.NewHash("f000000000000000000000000000000000000002")},
+ },
+ {
+ Author: Signature{Name: "Foo", Email: "foo@example.local", When: ts},
+ Committer: Signature{Name: "Bar", Email: "bar@example.local", When: ts},
+ Message: "Message\n\nFoo\nBar\nWith no trailing blank lines",
+ tree: plumbing.NewHash("0000000000000000000000000000000000000003"),
+ parents: []plumbing.Hash{
+ plumbing.NewHash("f000000000000000000000000000000000000004"),
+ plumbing.NewHash("f000000000000000000000000000000000000005"),
+ plumbing.NewHash("f000000000000000000000000000000000000006"),
+ plumbing.NewHash("f000000000000000000000000000000000000007"),
+ },
+ },
+ }
+ for _, commit := range commits {
+ obj := &plumbing.MemoryObject{}
+ err = commit.Encode(obj)
+ c.Assert(err, IsNil)
+ newCommit := &Commit{}
+ err = newCommit.Decode(obj)
+ c.Assert(err, IsNil)
+ commit.Hash = obj.Hash()
+ c.Assert(newCommit, DeepEquals, commit)
+ }
+}
+
+func (s *SuiteCommit) TestFile(c *C) {
+ file, err := s.Commit.File("CHANGELOG")
+ c.Assert(err, IsNil)
+ c.Assert(file.Name, Equals, "CHANGELOG")
+}
+
+func (s *SuiteCommit) TestNumParents(c *C) {
+ c.Assert(s.Commit.NumParents(), Equals, 2)
+}
+
+func (s *SuiteCommit) TestHistory(c *C) {
+ commits, err := s.Commit.History()
+ c.Assert(err, IsNil)
+ c.Assert(commits, HasLen, 5)
+ c.Assert(commits[0].Hash.String(), Equals, s.Commit.Hash.String())
+ c.Assert(commits[len(commits)-1].Hash.String(), Equals, "b029517f6300c2da0f4b651b8642506cd6aaf45d")
+}
+
+func (s *SuiteCommit) TestString(c *C) {
+ c.Assert(s.Commit.String(), Equals, ""+
+ "commit 1669dce138d9b841a518c64b10914d88f5e488ea\n"+
+ "Author: Máximo Cuadros Ortiz <mcuadros@gmail.com>\n"+
+ "Date: Tue Mar 31 13:48:14 2015 +0200\n"+
+ "\n"+
+ " Merge branch 'master' of github.com:tyba/git-fixture\n"+
+ "\n",
+ )
+}
+
+func (s *SuiteCommit) TestStringMultiLine(c *C) {
+ hash := plumbing.NewHash("e7d896db87294e33ca3202e536d4d9bb16023db3")
+
+ f := fixtures.ByURL("https://github.com/src-d/go-git.git").One()
+ sto, err := filesystem.NewStorage(f.DotGit())
+
+ o, err := sto.EncodedObject(plumbing.CommitObject, hash)
+ c.Assert(err, IsNil)
+ commit, err := DecodeCommit(sto, o)
+ c.Assert(err, IsNil)
+
+ c.Assert(commit.String(), Equals, ""+
+ "commit e7d896db87294e33ca3202e536d4d9bb16023db3\n"+
+ "Author: Alberto Cortés <alberto@sourced.tech>\n"+
+ "Date: Wed Jan 27 11:13:49 2016 +0100\n"+
+ "\n"+
+ " fix zlib invalid header error\n"+
+ "\n"+
+ " The return value of reads to the packfile were being ignored, so zlib\n"+
+ " was getting invalid data on it read buffers.\n"+
+ "\n",
+ )
+}
+
+func (s *SuiteCommit) TestCommitIterNext(c *C) {
+ i := s.Commit.Parents()
+
+ commit, err := i.Next()
+ c.Assert(err, IsNil)
+ c.Assert(commit.ID().String(), Equals, "35e85108805c84807bc66a02d91535e1e24b38b9")
+
+ commit, err = i.Next()
+ c.Assert(err, IsNil)
+ c.Assert(commit.ID().String(), Equals, "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69")
+
+ commit, err = i.Next()
+ c.Assert(err, Equals, io.EOF)
+ c.Assert(commit, IsNil)
+}
diff --git a/plumbing/object/commit_walker.go b/plumbing/object/commit_walker.go
new file mode 100644
index 0000000..dcce6b9
--- /dev/null
+++ b/plumbing/object/commit_walker.go
@@ -0,0 +1,67 @@
+package object
+
+import (
+ "io"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+)
+
+type commitWalker struct {
+ seen map[plumbing.Hash]bool
+ stack []*CommitIter
+ start *Commit
+ cb func(*Commit) error
+}
+
+// WalkCommitHistory walks the commit history starting at the given commit,
+// calling cb once for each reachable commit.
+func WalkCommitHistory(c *Commit, cb func(*Commit) error) error {
+ w := &commitWalker{
+ seen: make(map[plumbing.Hash]bool),
+ stack: make([]*CommitIter, 0),
+ start: c,
+ cb: cb,
+ }
+
+ return w.walk()
+}
+
+func (w *commitWalker) walk() error {
+ var commit *Commit
+
+ if w.start != nil {
+ commit = w.start
+ w.start = nil
+ } else {
+ current := len(w.stack) - 1
+ if current < 0 {
+ return nil
+ }
+
+ var err error
+ commit, err = w.stack[current].Next()
+ if err == io.EOF {
+ w.stack = w.stack[:current]
+ return w.walk()
+ }
+
+ if err != nil {
+ return err
+ }
+ }
+
+ // check and update seen
+ if w.seen[commit.Hash] {
+ return w.walk()
+ }
+
+ w.seen[commit.Hash] = true
+ if commit.NumParents() > 0 {
+ w.stack = append(w.stack, commit.Parents())
+ }
+
+ if err := w.cb(commit); err != nil {
+ return err
+ }
+
+ return w.walk()
+}
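For reference, WalkCommitHistory visits each reachable commit exactly once (via the seen map) but in no particular order, so callers usually sort afterwards, as the test below does. A hedged sketch; historyOldestFirst is a hypothetical helper, not part of this commit.

package example

import (
	"gopkg.in/src-d/go-git.v4/plumbing/object"
)

// historyOldestFirst collects the history of head and returns it sorted by
// committer date.
func historyOldestFirst(head *object.Commit) ([]*object.Commit, error) {
	var commits []*object.Commit
	err := object.WalkCommitHistory(head, func(c *object.Commit) error {
		commits = append(commits, c)
		return nil
	})
	if err != nil {
		return nil, err
	}

	// SortCommits orders by committer date, from older to newer;
	// ReverseSortCommits would give newest first.
	object.SortCommits(commits)
	return commits, nil
}

// Example use (assuming head was obtained with object.GetCommit):
//
//	commits, err := historyOldestFirst(head)
//	if err == nil {
//		for _, c := range commits {
//			println(c.Hash.String(), c.Committer.When.String())
//		}
//	}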
diff --git a/plumbing/object/commit_walker_test.go b/plumbing/object/commit_walker_test.go
new file mode 100644
index 0000000..67d6695
--- /dev/null
+++ b/plumbing/object/commit_walker_test.go
@@ -0,0 +1,34 @@
+package object
+
+import . "gopkg.in/check.v1"
+
+type CommitWalkerSuite struct {
+ BaseObjectsSuite
+}
+
+var _ = Suite(&CommitWalkerSuite{})
+
+func (s *CommitWalkerSuite) TestWalkerNext(c *C) {
+ commit := s.commit(c, s.Fixture.Head)
+
+ var commits []*Commit
+
+ WalkCommitHistory(commit, func(c *Commit) error {
+ commits = append(commits, c)
+ return nil
+ })
+
+ SortCommits(commits)
+ c.Assert(commits, HasLen, 8)
+
+ expected := []string{
+ "b029517f6300c2da0f4b651b8642506cd6aaf45d", "b8e471f58bcbca63b07bda20e428190409c2db47",
+ "35e85108805c84807bc66a02d91535e1e24b38b9", "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69",
+ "1669dce138d9b841a518c64b10914d88f5e488ea", "af2d6a6954d532f8ffb47615169c8fdf9d383a1a",
+ "918c48b83bd081e863dbe1b80f8998f058cd8294", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5",
+ }
+
+ for i, commit := range commits {
+ c.Assert(commit.Hash.String(), Equals, expected[i])
+ }
+}
diff --git a/plumbing/object/file.go b/plumbing/object/file.go
new file mode 100644
index 0000000..4846f98
--- /dev/null
+++ b/plumbing/object/file.go
@@ -0,0 +1,116 @@
+package object
+
+import (
+ "bytes"
+ "io"
+ "os"
+ "strings"
+
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/utils/ioutil"
+)
+
+// File represents git file objects.
+type File struct {
+ Name string
+ Mode os.FileMode
+ Blob
+}
+
+// NewFile returns a File based on the given blob object
+func NewFile(name string, m os.FileMode, b *Blob) *File {
+ return &File{Name: name, Mode: m, Blob: *b}
+}
+
+// Contents returns the contents of a file as a string.
+func (f *File) Contents() (content string, err error) {
+ reader, err := f.Reader()
+ if err != nil {
+ return "", err
+ }
+ defer ioutil.CheckClose(reader, &err)
+
+ buf := new(bytes.Buffer)
+ if _, err := buf.ReadFrom(reader); err != nil {
+ return "", err
+ }
+
+ return buf.String(), nil
+}
+
+// Lines returns a slice of lines from the contents of a file, stripping
+// all end-of-line characters. If the contents end with a newline, the
+// resulting trailing empty line is dropped.
+func (f *File) Lines() ([]string, error) {
+ content, err := f.Contents()
+ if err != nil {
+ return nil, err
+ }
+
+ splits := strings.Split(content, "\n")
+ // remove the last line if it is empty
+ if splits[len(splits)-1] == "" {
+ return splits[:len(splits)-1], nil
+ }
+
+ return splits, nil
+}
+
+type FileIter struct {
+ s storer.EncodedObjectStorer
+ w TreeWalker
+}
+
+func NewFileIter(s storer.EncodedObjectStorer, t *Tree) *FileIter {
+ return &FileIter{s: s, w: *NewTreeWalker(t, true)}
+}
+
+func (iter *FileIter) Next() (*File, error) {
+ for {
+ name, entry, err := iter.w.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ if entry.Mode.IsDir() {
+ continue
+ }
+
+ blob, err := GetBlob(iter.s, entry.Hash)
+ if err != nil {
+ return nil, err
+ }
+
+ return NewFile(name, entry.Mode, blob), nil
+ }
+}
+
+// ForEach calls the cb function for each file contained in this iter until
+// an error happens or the end of the iter is reached. If storer.ErrStop is sent,
+// the iteration is stopped but no error is returned. The iterator is closed.
+func (iter *FileIter) ForEach(cb func(*File) error) error {
+ defer iter.Close()
+
+ for {
+ f, err := iter.Next()
+ if err != nil {
+ if err == io.EOF {
+ return nil
+ }
+
+ return err
+ }
+
+ if err := cb(f); err != nil {
+ if err == storer.ErrStop {
+ return nil
+ }
+
+ return err
+ }
+ }
+}
+
+func (iter *FileIter) Close() {
+ iter.w.Close()
+}
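For reference, File wraps a Blob with its path and mode, and Contents/Lines are convenience readers. A hedged sketch that dumps every file of a tree using the FileIter added above; dumpTreeFiles is a hypothetical helper, and the tree is assumed to come from something like Commit.Tree().

package example

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
)

// dumpTreeFiles prints the mode, name and line count of every file in a tree.
func dumpTreeFiles(s storer.EncodedObjectStorer, tree *object.Tree) error {
	iter := object.NewFileIter(s, tree)

	// ForEach closes the iterator itself, as documented above.
	return iter.ForEach(func(f *object.File) error {
		lines, err := f.Lines()
		if err != nil {
			return err
		}
		fmt.Printf("%s %s: %d lines\n", f.Mode, f.Name, len(lines))
		return nil
	})
}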
diff --git a/plumbing/object/file_test.go b/plumbing/object/file_test.go
new file mode 100644
index 0000000..f734455
--- /dev/null
+++ b/plumbing/object/file_test.go
@@ -0,0 +1,249 @@
+package object
+
+import (
+ "io"
+
+ "gopkg.in/src-d/go-git.v4/fixtures"
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/storage/filesystem"
+
+ . "gopkg.in/check.v1"
+)
+
+type FileSuite struct {
+ BaseObjectsSuite
+}
+
+var _ = Suite(&FileSuite{})
+
+type fileIterExpectedEntry struct {
+ Name string
+ Hash string
+}
+
+var fileIterTests = []struct {
+ repo string // the repo name as in localRepos
+ commit string // the commit to search for the file
+ files []fileIterExpectedEntry
+}{
+ {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", []fileIterExpectedEntry{
+ {".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"},
+ {"CHANGELOG", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"},
+ {"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"},
+ {"binary.jpg", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"},
+ {"go/example.go", "880cd14280f4b9b6ed3986d6671f907d7cc2a198"},
+ {"json/long.json", "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9"},
+ {"json/short.json", "c8f1d8c61f9da76f4cb49fd86322b6e685dba956"},
+ {"php/crappy.php", "9a48f23120e880dfbe41f7c9b7b708e9ee62a492"},
+ {"vendor/foo.go", "9dea2395f5403188298c1dabe8bdafe562c491e3"},
+ }},
+}
+
+func (s *FileSuite) TestIter(c *C) {
+ for i, t := range fileIterTests {
+ f := fixtures.ByURL(t.repo).One()
+ sto, err := filesystem.NewStorage(f.DotGit())
+ c.Assert(err, IsNil)
+
+ h := plumbing.NewHash(t.commit)
+ commit, err := GetCommit(sto, h)
+ c.Assert(err, IsNil, Commentf("subtest %d: %v (%s)", i, err, t.commit))
+
+ tree, err := commit.Tree()
+ c.Assert(err, IsNil)
+
+ iter := NewFileIter(sto, tree)
+ for k := 0; k < len(t.files); k++ {
+ exp := t.files[k]
+ file, err := iter.Next()
+ c.Assert(err, IsNil, Commentf("subtest %d, iter %d, err=%v", i, k, err))
+ c.Assert(file.Mode.String(), Equals, "-rw-r--r--")
+ c.Assert(file.Hash.IsZero(), Equals, false)
+ c.Assert(file.Hash, Equals, file.ID())
+ c.Assert(file.Name, Equals, exp.Name, Commentf("subtest %d, iter %d, name=%s, expected=%s", i, k, file.Name, exp.Hash))
+ c.Assert(file.Hash.String(), Equals, exp.Hash, Commentf("subtest %d, iter %d, hash=%v, expected=%s", i, k, file.Hash.String(), exp.Hash))
+ }
+ _, err = iter.Next()
+ c.Assert(err, Equals, io.EOF)
+ }
+}
+
+var contentsTests = []struct {
+ repo string // the repo name as in localRepos
+ commit string // the commit to search for the file
+ path string // the path of the file to find
+ contents string // expected contents of the file
+}{
+ {
+ "https://github.com/git-fixtures/basic.git",
+ "b029517f6300c2da0f4b651b8642506cd6aaf45d",
+ ".gitignore",
+ `*.class
+
+# Mobile Tools for Java (J2ME)
+.mtj.tmp/
+
+# Package Files #
+*.jar
+*.war
+*.ear
+
+# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
+hs_err_pid*
+`,
+ },
+ {
+ "https://github.com/git-fixtures/basic.git",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5",
+ "CHANGELOG",
+ `Initial changelog
+`,
+ },
+}
+
+func (s *FileSuite) TestContents(c *C) {
+ for i, t := range contentsTests {
+ f := fixtures.ByURL(t.repo).One()
+ sto, err := filesystem.NewStorage(f.DotGit())
+ c.Assert(err, IsNil)
+
+ h := plumbing.NewHash(t.commit)
+ commit, err := GetCommit(sto, h)
+ c.Assert(err, IsNil, Commentf("subtest %d: %v (%s)", i, err, t.commit))
+
+ file, err := commit.File(t.path)
+ c.Assert(err, IsNil)
+ content, err := file.Contents()
+ c.Assert(err, IsNil)
+ c.Assert(content, Equals, t.contents, Commentf(
+ "subtest %d: commit=%s, path=%s", i, t.commit, t.path))
+ }
+}
+
+var linesTests = []struct {
+ repo string // the repo name as in localRepos
+ commit string // the commit to search for the file
+ path string // the path of the file to find
+ lines []string // expected lines in the file
+}{
+ {
+ "https://github.com/git-fixtures/basic.git",
+ "b029517f6300c2da0f4b651b8642506cd6aaf45d",
+ ".gitignore",
+ []string{
+ "*.class",
+ "",
+ "# Mobile Tools for Java (J2ME)",
+ ".mtj.tmp/",
+ "",
+ "# Package Files #",
+ "*.jar",
+ "*.war",
+ "*.ear",
+ "",
+ "# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml",
+ "hs_err_pid*",
+ },
+ },
+ {
+ "https://github.com/git-fixtures/basic.git",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5",
+ "CHANGELOG",
+ []string{
+ "Initial changelog",
+ },
+ },
+}
+
+func (s *FileSuite) TestLines(c *C) {
+ for i, t := range linesTests {
+ f := fixtures.ByURL(t.repo).One()
+ sto, err := filesystem.NewStorage(f.DotGit())
+ c.Assert(err, IsNil)
+
+ h := plumbing.NewHash(t.commit)
+ commit, err := GetCommit(sto, h)
+ c.Assert(err, IsNil, Commentf("subtest %d: %v (%s)", i, err, t.commit))
+
+ file, err := commit.File(t.path)
+ c.Assert(err, IsNil)
+ lines, err := file.Lines()
+ c.Assert(err, IsNil)
+ c.Assert(lines, DeepEquals, t.lines, Commentf(
+ "subtest %d: commit=%s, path=%s", i, t.commit, t.path))
+ }
+}
+
+var ignoreEmptyDirEntriesTests = []struct {
+ repo string // the repo name as in localRepos
+ commit string // the commit to search for the file
+}{
+ {
+ "https://github.com/cpcs499/Final_Pres_P.git",
+ "70bade703ce556c2c7391a8065c45c943e8b6bc3",
+ // the Final dir in this commit is empty
+ },
+}
+
+// It is difficult to assert that we are ignoring an (empty) dir as even
+// if we don't, no files will be found in it.
+//
+// At least this test has a high chance of panicking if
+// we don't ignore empty dirs.
+func (s *FileSuite) TestIgnoreEmptyDirEntries(c *C) {
+ for i, t := range ignoreEmptyDirEntriesTests {
+ f := fixtures.ByURL(t.repo).One()
+ sto, err := filesystem.NewStorage(f.DotGit())
+ c.Assert(err, IsNil)
+
+ h := plumbing.NewHash(t.commit)
+ commit, err := GetCommit(sto, h)
+ c.Assert(err, IsNil, Commentf("subtest %d: %v (%s)", i, err, t.commit))
+
+ tree, err := commit.Tree()
+ c.Assert(err, IsNil)
+
+ iter := tree.Files()
+ defer iter.Close()
+ for file, err := iter.Next(); err == nil; file, err = iter.Next() {
+ _, _ = file.Contents()
+ // this would probably panic if we are not ignoring empty dirs
+ }
+ }
+}
+
+func (s *FileSuite) TestFileIter(c *C) {
+ hash := plumbing.NewHash("1669dce138d9b841a518c64b10914d88f5e488ea")
+ commit, err := GetCommit(s.Storer, hash)
+ c.Assert(err, IsNil)
+
+ tree, err := commit.Tree()
+ c.Assert(err, IsNil)
+
+ expected := []string{
+ ".gitignore",
+ "CHANGELOG",
+ "LICENSE",
+ "binary.jpg",
+ }
+
+ var count int
+ i := tree.Files()
+ i.ForEach(func(f *File) error {
+ c.Assert(f.Name, Equals, expected[count])
+ count++
+ return nil
+ })
+
+ c.Assert(count, Equals, 4)
+
+ count = 0
+ i = tree.Files()
+ i.ForEach(func(f *File) error {
+ count++
+ return storer.ErrStop
+ })
+
+ c.Assert(count, Equals, 1)
+}
diff --git a/plumbing/object/object.go b/plumbing/object/object.go
new file mode 100644
index 0000000..8bdbb2a
--- /dev/null
+++ b/plumbing/object/object.go
@@ -0,0 +1,217 @@
+package object
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "strconv"
+ "time"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+)
+
+// ErrUnsupportedObject is returned when a non-supported object is being decoded.
+var ErrUnsupportedObject = errors.New("unsupported object type")
+
+// Object is a generic representation of any git object. It is implemented by
+// Commit, Tree, Blob and Tag, and includes the functions that are common to
+// them.
+//
+// Object is returned when an object could be of any type. It is frequently used
+// with a type switch to acquire the specific type of object:
+//
+// func process(obj Object) {
+// switch o := obj.(type) {
+// case *Commit:
+// // o is a Commit
+// case *Tree:
+// // o is a Tree
+// case *Blob:
+// // o is a Blob
+// case *Tag:
+// // o is a Tag
+// }
+// }
+//
+// This interface is intentionally different from plumbing.EncodedObject, which is a lower
+// level interface used by storage implementations to read and write objects.
+type Object interface {
+ ID() plumbing.Hash
+ Type() plumbing.ObjectType
+ Decode(plumbing.EncodedObject) error
+ Encode(plumbing.EncodedObject) error
+}
+
+// GetObject gets an object from an object storer and decodes it.
+func GetObject(s storer.EncodedObjectStorer, h plumbing.Hash) (Object, error) {
+ o, err := s.EncodedObject(plumbing.AnyObject, h)
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeObject(s, o)
+}
+
+// DecodeObject decodes an encoded object into an Object and associates it to
+// the given object storer.
+func DecodeObject(s storer.EncodedObjectStorer, o plumbing.EncodedObject) (Object, error) {
+ switch o.Type() {
+ case plumbing.CommitObject:
+ return DecodeCommit(s, o)
+ case plumbing.TreeObject:
+ return DecodeTree(s, o)
+ case plumbing.BlobObject:
+ return DecodeBlob(o)
+ case plumbing.TagObject:
+ return DecodeTag(s, o)
+ default:
+ return nil, plumbing.ErrInvalidType
+ }
+}
+
+// DateFormat is the date format used in the original git implementation
+const DateFormat = "Mon Jan 02 15:04:05 2006 -0700"
+
+// Signature represents an action signed by a person
+type Signature struct {
+ Name string
+ Email string
+ When time.Time
+}
+
+// Decode decodes a byte slice into a signature
+func (s *Signature) Decode(b []byte) {
+ open := bytes.IndexByte(b, '<')
+ close := bytes.IndexByte(b, '>')
+ if open == -1 || close == -1 {
+ return
+ }
+
+ s.Name = string(bytes.Trim(b[:open], " "))
+ s.Email = string(b[open+1 : close])
+
+ hasTime := close+2 < len(b)
+ if hasTime {
+ s.decodeTimeAndTimeZone(b[close+2:])
+ }
+}
+
+// Encode encodes a Signature into a writer.
+func (s *Signature) Encode(w io.Writer) error {
+ if _, err := fmt.Fprintf(w, "%s <%s> ", s.Name, s.Email); err != nil {
+ return err
+ }
+ if err := s.encodeTimeAndTimeZone(w); err != nil {
+ return err
+ }
+ return nil
+}
+
+var timeZoneLength = 5
+
+func (s *Signature) decodeTimeAndTimeZone(b []byte) {
+ space := bytes.IndexByte(b, ' ')
+ if space == -1 {
+ space = len(b)
+ }
+
+ ts, err := strconv.ParseInt(string(b[:space]), 10, 64)
+ if err != nil {
+ return
+ }
+
+ s.When = time.Unix(ts, 0).In(time.UTC)
+ var tzStart = space + 1
+ if tzStart >= len(b) || tzStart+timeZoneLength > len(b) {
+ return
+ }
+
+ tl, err := time.Parse("-0700", string(b[tzStart:tzStart+timeZoneLength]))
+ if err != nil {
+ return
+ }
+
+ s.When = s.When.In(tl.Location())
+}
+
+func (s *Signature) encodeTimeAndTimeZone(w io.Writer) error {
+ _, err := fmt.Fprintf(w, "%d %s", s.When.Unix(), s.When.Format("-0700"))
+ return err
+}
+
+func (s *Signature) String() string {
+ return fmt.Sprintf("%s <%s>", s.Name, s.Email)
+}
+
+// ObjectIter provides an iterator for a set of objects.
+type ObjectIter struct {
+ storer.EncodedObjectIter
+ s storer.EncodedObjectStorer
+}
+
+// NewObjectIter returns an ObjectIter for the given object storer and underlying
+// object iterator.
+func NewObjectIter(s storer.EncodedObjectStorer, iter storer.EncodedObjectIter) *ObjectIter {
+ return &ObjectIter{iter, s}
+}
+
+// Next moves the iterator to the next object and returns a pointer to it. If it
+// has reached the end of the set it will return io.EOF.
+func (iter *ObjectIter) Next() (Object, error) {
+ for {
+ obj, err := iter.EncodedObjectIter.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ o, err := iter.toObject(obj)
+ if err == plumbing.ErrInvalidType {
+ continue
+ }
+
+ if err != nil {
+ return nil, err
+ }
+
+ return o, nil
+ }
+}
+
+// ForEach calls the cb function for each object contained in this iter until
+// an error happens or the end of the iter is reached. If ErrStop is sent,
+// the iteration is stopped but no error is returned. The iterator is closed.
+func (iter *ObjectIter) ForEach(cb func(Object) error) error {
+ return iter.EncodedObjectIter.ForEach(func(obj plumbing.EncodedObject) error {
+ o, err := iter.toObject(obj)
+ if err == plumbing.ErrInvalidType {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ return cb(o)
+ })
+}
+
+func (iter *ObjectIter) toObject(obj plumbing.EncodedObject) (Object, error) {
+ switch obj.Type() {
+ case plumbing.BlobObject:
+ blob := &Blob{}
+ return blob, blob.Decode(obj)
+ case plumbing.TreeObject:
+ tree := &Tree{s: iter.s}
+ return tree, tree.Decode(obj)
+ case plumbing.CommitObject:
+ commit := &Commit{}
+ return commit, commit.Decode(obj)
+ case plumbing.TagObject:
+ tag := &Tag{}
+ return tag, tag.Decode(obj)
+ default:
+ return nil, plumbing.ErrInvalidType
+ }
+}
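For reference, a hedged sketch of the generic Object interface added above, plus Signature.Decode, which parses the "Name <email> unix-seconds zone" form used throughout. describeAny and exampleSignature are hypothetical helpers, not part of this commit.

package example

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
)

// describeAny resolves a hash of unknown type and switches on the concrete
// Object, as the interface documentation above suggests.
func describeAny(s storer.EncodedObjectStorer, h plumbing.Hash) error {
	obj, err := object.GetObject(s, h)
	if err != nil {
		return err
	}

	switch o := obj.(type) {
	case *object.Commit:
		fmt.Println("commit by", o.Author.Name)
	case *object.Tree:
		fmt.Println("tree with", len(o.Entries), "entries")
	case *object.Blob:
		fmt.Println("blob of", o.Size, "bytes")
	case *object.Tag:
		fmt.Println("tag", o.Name)
	}
	return nil
}

// exampleSignature decodes a raw signature line like the ones in the tests below.
func exampleSignature() {
	var sig object.Signature
	sig.Decode([]byte("Foo Bar <foo@bar.com> 1257894000 +0100"))
	fmt.Println(sig.Name, sig.Email, sig.When.Unix()) // Foo Bar foo@bar.com 1257894000
}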
diff --git a/plumbing/object/object_test.go b/plumbing/object/object_test.go
new file mode 100644
index 0000000..04f2b73
--- /dev/null
+++ b/plumbing/object/object_test.go
@@ -0,0 +1,199 @@
+package object
+
+import (
+ "io"
+ "io/ioutil"
+ "testing"
+ "time"
+
+ "gopkg.in/src-d/go-git.v4/fixtures"
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/storage/filesystem"
+
+ . "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) { TestingT(t) }
+
+type BaseObjectsSuite struct {
+ fixtures.Suite
+ Storer storer.EncodedObjectStorer
+ Fixture *fixtures.Fixture
+}
+
+func (s *BaseObjectsSuite) SetUpSuite(c *C) {
+ s.Suite.SetUpSuite(c)
+ s.Fixture = fixtures.Basic().One()
+ storer, err := filesystem.NewStorage(s.Fixture.DotGit())
+ c.Assert(err, IsNil)
+ s.Storer = storer
+}
+
+func (s *BaseObjectsSuite) tag(c *C, h plumbing.Hash) *Tag {
+ t, err := GetTag(s.Storer, h)
+ c.Assert(err, IsNil)
+ return t
+}
+
+func (s *BaseObjectsSuite) tree(c *C, h plumbing.Hash) *Tree {
+ t, err := GetTree(s.Storer, h)
+ c.Assert(err, IsNil)
+ return t
+}
+
+func (s *BaseObjectsSuite) commit(c *C, h plumbing.Hash) *Commit {
+ commit, err := GetCommit(s.Storer, h)
+ c.Assert(err, IsNil)
+ return commit
+}
+
+type ObjectsSuite struct {
+ BaseObjectsSuite
+}
+
+var _ = Suite(&ObjectsSuite{})
+
+func (s *ObjectsSuite) TestNewCommit(c *C) {
+ hash := plumbing.NewHash("a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69")
+ commit := s.commit(c, hash)
+
+ c.Assert(commit.Hash, Equals, commit.ID())
+ c.Assert(commit.Hash.String(), Equals, "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69")
+
+ tree, err := commit.Tree()
+ c.Assert(err, IsNil)
+ c.Assert(tree.Hash.String(), Equals, "c2d30fa8ef288618f65f6eed6e168e0d514886f4")
+
+ parents := commit.Parents()
+ parentCommit, err := parents.Next()
+ c.Assert(err, IsNil)
+ c.Assert(parentCommit.Hash.String(), Equals, "b029517f6300c2da0f4b651b8642506cd6aaf45d")
+
+ parentCommit, err = parents.Next()
+ c.Assert(err, IsNil)
+ c.Assert(parentCommit.Hash.String(), Equals, "b8e471f58bcbca63b07bda20e428190409c2db47")
+
+ c.Assert(commit.Author.Email, Equals, "mcuadros@gmail.com")
+ c.Assert(commit.Author.Name, Equals, "Máximo Cuadros")
+ c.Assert(commit.Author.When.Format(time.RFC3339), Equals, "2015-03-31T13:47:14+02:00")
+ c.Assert(commit.Committer.Email, Equals, "mcuadros@gmail.com")
+ c.Assert(commit.Message, Equals, "Merge pull request #1 from dripolles/feature\n\nCreating changelog")
+}
+
+func (s *ObjectsSuite) TestParseTree(c *C) {
+ hash := plumbing.NewHash("a8d315b2b1c615d43042c3a62402b8a54288cf5c")
+ tree, err := GetTree(s.Storer, hash)
+ c.Assert(err, IsNil)
+
+ c.Assert(tree.Entries, HasLen, 8)
+
+ tree.buildMap()
+ c.Assert(tree.m, HasLen, 8)
+ c.Assert(tree.m[".gitignore"].Name, Equals, ".gitignore")
+ c.Assert(tree.m[".gitignore"].Mode.String(), Equals, "-rw-r--r--")
+ c.Assert(tree.m[".gitignore"].Hash.String(), Equals, "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88")
+
+ count := 0
+ iter := tree.Files()
+ defer iter.Close()
+ for f, err := iter.Next(); err == nil; f, err = iter.Next() {
+ count++
+ if f.Name == "go/example.go" {
+ reader, err := f.Reader()
+ c.Assert(err, IsNil)
+ defer func() { c.Assert(reader.Close(), IsNil) }()
+ content, _ := ioutil.ReadAll(reader)
+ c.Assert(content, HasLen, 2780)
+ }
+ }
+
+ c.Assert(count, Equals, 9)
+}
+
+func (s *ObjectsSuite) TestParseSignature(c *C) {
+ cases := map[string]Signature{
+ `Foo Bar <foo@bar.com> 1257894000 +0100`: {
+ Name: "Foo Bar",
+ Email: "foo@bar.com",
+ When: MustParseTime("2009-11-11 00:00:00 +0100"),
+ },
+ `Foo Bar <foo@bar.com> 1257894000 -0700`: {
+ Name: "Foo Bar",
+ Email: "foo@bar.com",
+ When: MustParseTime("2009-11-10 16:00:00 -0700"),
+ },
+ `Foo Bar <> 1257894000 +0100`: {
+ Name: "Foo Bar",
+ Email: "",
+ When: MustParseTime("2009-11-11 00:00:00 +0100"),
+ },
+ ` <> 1257894000`: {
+ Name: "",
+ Email: "",
+ When: MustParseTime("2009-11-10 23:00:00 +0000"),
+ },
+ `Foo Bar <foo@bar.com>`: {
+ Name: "Foo Bar",
+ Email: "foo@bar.com",
+ When: time.Time{},
+ },
+ ``: {
+ Name: "",
+ Email: "",
+ When: time.Time{},
+ },
+ `<`: {
+ Name: "",
+ Email: "",
+ When: time.Time{},
+ },
+ }
+
+ for raw, exp := range cases {
+ got := &Signature{}
+ got.Decode([]byte(raw))
+
+ c.Assert(got.Name, Equals, exp.Name)
+ c.Assert(got.Email, Equals, exp.Email)
+ c.Assert(got.When.Format(time.RFC3339), Equals, exp.When.Format(time.RFC3339))
+ }
+}
+
+func (s *ObjectsSuite) TestObjectIter(c *C) {
+ encIter, err := s.Storer.IterEncodedObjects(plumbing.AnyObject)
+ c.Assert(err, IsNil)
+ iter := NewObjectIter(s.Storer, encIter)
+
+ objects := []Object{}
+ iter.ForEach(func(o Object) error {
+ objects = append(objects, o)
+ return nil
+ })
+
+ c.Assert(len(objects) > 0, Equals, true)
+ iter.Close()
+
+ encIter, err = s.Storer.IterEncodedObjects(plumbing.AnyObject)
+ c.Assert(err, IsNil)
+ iter = NewObjectIter(s.Storer, encIter)
+
+ i := 0
+ for {
+ o, err := iter.Next()
+ if err == io.EOF {
+ break
+ }
+
+ c.Assert(err, IsNil)
+ c.Assert(o, DeepEquals, objects[i])
+ i += 1
+ }
+
+ iter.Close()
+}
+
+func MustParseTime(value string) time.Time {
+ t, _ := time.Parse("2006-01-02 15:04:05 -0700", value)
+ return t
+}
diff --git a/plumbing/object/tag.go b/plumbing/object/tag.go
new file mode 100644
index 0000000..5ca363d
--- /dev/null
+++ b/plumbing/object/tag.go
@@ -0,0 +1,268 @@
+package object
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ stdioutil "io/ioutil"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/utils/ioutil"
+)
+
+// Tag represents an annotated tag object. It points to a single git object of
+// any type, but tags typically are applied to commit or blob objects. It
+// provides a reference that associates the target with a tag name. It also
+// contains meta-information about the tag, including the tagger, tag date and
+// message.
+//
+// https://git-scm.com/book/en/v2/Git-Internals-Git-References#Tags
+type Tag struct {
+ Hash plumbing.Hash
+ Name string
+ Tagger Signature
+ Message string
+ TargetType plumbing.ObjectType
+ Target plumbing.Hash
+
+ s storer.EncodedObjectStorer
+}
+
+// GetTag gets a tag from an object storer and decodes it.
+func GetTag(s storer.EncodedObjectStorer, h plumbing.Hash) (*Tag, error) {
+ o, err := s.EncodedObject(plumbing.TagObject, h)
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeTag(s, o)
+}
+
+// DecodeTag decodes an encoded object into a *Tag and associates it to the
+// given object storer.
+func DecodeTag(s storer.EncodedObjectStorer, o plumbing.EncodedObject) (*Tag, error) {
+ t := &Tag{s: s}
+ if err := t.Decode(o); err != nil {
+ return nil, err
+ }
+
+ return t, nil
+}
+
+// ID returns the object ID of the tag, not the object that the tag references.
+// The returned value will always match the current value of Tag.Hash.
+//
+// ID is present to fulfill the Object interface.
+func (t *Tag) ID() plumbing.Hash {
+ return t.Hash
+}
+
+// Type returns the type of object. It always returns plumbing.TagObject.
+//
+// Type is present to fulfill the Object interface.
+func (t *Tag) Type() plumbing.ObjectType {
+ return plumbing.TagObject
+}
+
+// Decode transforms a plumbing.EncodedObject into a Tag struct.
+func (t *Tag) Decode(o plumbing.EncodedObject) (err error) {
+ if o.Type() != plumbing.TagObject {
+ return ErrUnsupportedObject
+ }
+
+ t.Hash = o.Hash()
+
+ reader, err := o.Reader()
+ if err != nil {
+ return err
+ }
+ defer ioutil.CheckClose(reader, &err)
+
+ r := bufio.NewReader(reader)
+ for {
+ line, err := r.ReadSlice('\n')
+ if err != nil && err != io.EOF {
+ return err
+ }
+
+ line = bytes.TrimSpace(line)
+ if len(line) == 0 {
+ break // Start of message
+ }
+
+ split := bytes.SplitN(line, []byte{' '}, 2)
+ switch string(split[0]) {
+ case "object":
+ t.Target = plumbing.NewHash(string(split[1]))
+ case "type":
+ t.TargetType, err = plumbing.ParseObjectType(string(split[1]))
+ if err != nil {
+ return err
+ }
+ case "tag":
+ t.Name = string(split[1])
+ case "tagger":
+ t.Tagger.Decode(split[1])
+ }
+
+ if err == io.EOF {
+ return nil
+ }
+ }
+
+ data, err := stdioutil.ReadAll(r)
+ if err != nil {
+ return err
+ }
+ t.Message = string(data)
+
+ return nil
+}
+
+// Encode transforms a Tag into a plumbing.EncodedObject.
+func (t *Tag) Encode(o plumbing.EncodedObject) error {
+ o.SetType(plumbing.TagObject)
+ w, err := o.Writer()
+ if err != nil {
+ return err
+ }
+ defer ioutil.CheckClose(w, &err)
+
+ if _, err = fmt.Fprintf(w,
+ "object %s\ntype %s\ntag %s\ntagger ",
+ t.Target.String(), t.TargetType.Bytes(), t.Name); err != nil {
+ return err
+ }
+
+ if err = t.Tagger.Encode(w); err != nil {
+ return err
+ }
+
+ if _, err = fmt.Fprint(w, "\n\n"); err != nil {
+ return err
+ }
+
+ if _, err = fmt.Fprint(w, t.Message); err != nil {
+ return err
+ }
+
+ return err
+}
+
+// Commit returns the commit pointed to by the tag. If the tag points to a
+// different type of object ErrUnsupportedObject will be returned.
+func (t *Tag) Commit() (*Commit, error) {
+ if t.TargetType != plumbing.CommitObject {
+ return nil, ErrUnsupportedObject
+ }
+
+ o, err := t.s.EncodedObject(plumbing.CommitObject, t.Target)
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeCommit(t.s, o)
+}
+
+// Tree returns the tree pointed to by the tag. If the tag points to a commit
+// object the tree of that commit will be returned. If the tag does not point
+// to a commit or tree object ErrUnsupportedObject will be returned.
+func (t *Tag) Tree() (*Tree, error) {
+ switch t.TargetType {
+ case plumbing.CommitObject:
+ c, err := t.Commit()
+ if err != nil {
+ return nil, err
+ }
+
+ return c.Tree()
+ case plumbing.TreeObject:
+ return GetTree(t.s, t.Target)
+ default:
+ return nil, ErrUnsupportedObject
+ }
+}
+
+// Blob returns the blob pointed to by the tag. If the tag points to a
+// different type of object ErrUnsupportedObject will be returned.
+func (t *Tag) Blob() (*Blob, error) {
+ if t.TargetType != plumbing.BlobObject {
+ return nil, ErrUnsupportedObject
+ }
+
+ return GetBlob(t.s, t.Target)
+}
+
+// Object returns the object pointed to by the tag.
+func (t *Tag) Object() (Object, error) {
+ o, err := t.s.EncodedObject(t.TargetType, t.Target)
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeObject(t.s, o)
+}
+
+// String returns the meta information contained in the tag as a formatted
+// string.
+func (t *Tag) String() string {
+ obj, _ := t.Object()
+
+ return fmt.Sprintf(
+ "%s %s\nTagger: %s\nDate: %s\n\n%s\n%s",
+ plumbing.TagObject, t.Name, t.Tagger.String(), t.Tagger.When.Format(DateFormat),
+ t.Message, objectAsString(obj),
+ )
+}
+
+// TagIter provides an iterator for a set of tags.
+type TagIter struct {
+ storer.EncodedObjectIter
+ s storer.EncodedObjectStorer
+}
+
+// NewTagIter returns a TagIter for the given object storer and underlying
+// object iterator.
+//
+// The returned TagIter will automatically skip over non-tag objects.
+func NewTagIter(s storer.EncodedObjectStorer, iter storer.EncodedObjectIter) *TagIter {
+ return &TagIter{iter, s}
+}
+
+// Next moves the iterator to the next tag and returns a pointer to it. If it
+// has reached the end of the set it will return io.EOF.
+func (iter *TagIter) Next() (*Tag, error) {
+ obj, err := iter.EncodedObjectIter.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeTag(iter.s, obj)
+}
+
+// ForEach calls the cb function for each tag contained in this iter until
+// an error happens or the end of the iter is reached. If ErrStop is sent,
+// the iteration is stopped but no error is returned. The iterator is closed.
+func (iter *TagIter) ForEach(cb func(*Tag) error) error {
+ return iter.EncodedObjectIter.ForEach(func(obj plumbing.EncodedObject) error {
+ t, err := DecodeTag(iter.s, obj)
+ if err != nil {
+ return err
+ }
+
+ return cb(t)
+ })
+}
+
+func objectAsString(obj Object) string {
+ switch o := obj.(type) {
+ case *Commit:
+ return o.String()
+ case *Tag:
+ return o.String()
+ }
+
+ return ""
+}
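
A minimal usage sketch (illustrative, not part of the patch): iterating every
annotated tag in an object storer and printing the commit each one points to.
The helper printTaggedCommits and the example package are hypothetical; the
storer s is assumed to satisfy storer.EncodedObjectStorer and to expose
IterEncodedObjects, as the test suite's Storer does below. Only identifiers
that appear in the patch (NewTagIter, Tag.Commit, ErrUnsupportedObject) are
otherwise relied on.

package example

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
)

// printTaggedCommits walks every annotated tag in s and prints the commit
// each tag points to, skipping tags whose target is not a commit.
func printTaggedCommits(s storer.EncodedObjectStorer) error {
	encIter, err := s.IterEncodedObjects(plumbing.TagObject)
	if err != nil {
		return err
	}

	iter := object.NewTagIter(s, encIter)
	return iter.ForEach(func(t *object.Tag) error {
		c, err := t.Commit()
		if err == object.ErrUnsupportedObject {
			// The tag points to a tree or a blob, not a commit.
			return nil
		}
		if err != nil {
			return err
		}

		fmt.Printf("%s -> %s\n", t.Name, c.ID())
		return nil
	})
}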
diff --git a/plumbing/object/tag_test.go b/plumbing/object/tag_test.go
new file mode 100644
index 0000000..2721442
--- /dev/null
+++ b/plumbing/object/tag_test.go
@@ -0,0 +1,169 @@
+package object
+
+import (
+ "fmt"
+ "time"
+
+ "gopkg.in/src-d/go-git.v4/fixtures"
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/storage/filesystem"
+
+ . "gopkg.in/check.v1"
+)
+
+type TagSuite struct {
+ BaseObjectsSuite
+}
+
+var _ = Suite(&TagSuite{})
+
+func (s *TagSuite) SetUpSuite(c *C) {
+ s.BaseObjectsSuite.SetUpSuite(c)
+ storer, err := filesystem.NewStorage(fixtures.ByURL("https://github.com/git-fixtures/tags.git").One().DotGit())
+ c.Assert(err, IsNil)
+ s.Storer = storer
+}
+
+func (s *TagSuite) TestName(c *C) {
+ tag := s.tag(c, plumbing.NewHash("b742a2a9fa0afcfa9a6fad080980fbc26b007c69"))
+ c.Assert(tag.Name, Equals, "annotated-tag")
+}
+
+func (s *TagSuite) TestTagger(c *C) {
+ tag := s.tag(c, plumbing.NewHash("b742a2a9fa0afcfa9a6fad080980fbc26b007c69"))
+ c.Assert(tag.Tagger.String(), Equals, "Máximo Cuadros <mcuadros@gmail.com>")
+}
+
+func (s *TagSuite) TestAnnotated(c *C) {
+ tag := s.tag(c, plumbing.NewHash("b742a2a9fa0afcfa9a6fad080980fbc26b007c69"))
+ c.Assert(tag.Message, Equals, "example annotated tag\n")
+
+ commit, err := tag.Commit()
+ c.Assert(err, IsNil)
+ c.Assert(commit.Type(), Equals, plumbing.CommitObject)
+ c.Assert(commit.ID().String(), Equals, "f7b877701fbf855b44c0a9e86f3fdce2c298b07f")
+}
+
+func (s *TagSuite) TestCommit(c *C) {
+ tag := s.tag(c, plumbing.NewHash("ad7897c0fb8e7d9a9ba41fa66072cf06095a6cfc"))
+ c.Assert(tag.Message, Equals, "a tagged commit\n")
+
+ commit, err := tag.Commit()
+ c.Assert(err, IsNil)
+ c.Assert(commit.Type(), Equals, plumbing.CommitObject)
+ c.Assert(commit.ID().String(), Equals, "f7b877701fbf855b44c0a9e86f3fdce2c298b07f")
+}
+
+func (s *TagSuite) TestBlob(c *C) {
+ tag := s.tag(c, plumbing.NewHash("fe6cb94756faa81e5ed9240f9191b833db5f40ae"))
+ c.Assert(tag.Message, Equals, "a tagged blob\n")
+
+ blob, err := tag.Blob()
+ c.Assert(err, IsNil)
+ c.Assert(blob.Type(), Equals, plumbing.BlobObject)
+ c.Assert(blob.ID().String(), Equals, "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391")
+}
+
+func (s *TagSuite) TestTree(c *C) {
+ tag := s.tag(c, plumbing.NewHash("152175bf7e5580299fa1f0ba41ef6474cc043b70"))
+ c.Assert(tag.Message, Equals, "a tagged tree\n")
+
+ tree, err := tag.Tree()
+ c.Assert(err, IsNil)
+ c.Assert(tree.Type(), Equals, plumbing.TreeObject)
+ c.Assert(tree.ID().String(), Equals, "70846e9a10ef7b41064b40f07713d5b8b9a8fc73")
+}
+
+func (s *TagSuite) TestTreeFromCommit(c *C) {
+ tag := s.tag(c, plumbing.NewHash("ad7897c0fb8e7d9a9ba41fa66072cf06095a6cfc"))
+ c.Assert(tag.Message, Equals, "a tagged commit\n")
+
+ tree, err := tag.Tree()
+ c.Assert(err, IsNil)
+ c.Assert(tree.Type(), Equals, plumbing.TreeObject)
+ c.Assert(tree.ID().String(), Equals, "70846e9a10ef7b41064b40f07713d5b8b9a8fc73")
+}
+
+func (s *TagSuite) TestObject(c *C) {
+ tag := s.tag(c, plumbing.NewHash("ad7897c0fb8e7d9a9ba41fa66072cf06095a6cfc"))
+
+ obj, err := tag.Object()
+ c.Assert(err, IsNil)
+ c.Assert(obj.Type(), Equals, plumbing.CommitObject)
+ c.Assert(obj.ID().String(), Equals, "f7b877701fbf855b44c0a9e86f3fdce2c298b07f")
+}
+
+func (s *TagSuite) TestTagIter(c *C) {
+ iter, err := s.Storer.IterEncodedObjects(plumbing.TagObject)
+ c.Assert(err, IsNil)
+
+ var count int
+ i := NewTagIter(s.Storer, iter)
+ err = i.ForEach(func(t *Tag) error {
+ count++
+ return nil
+ })
+
+ c.Assert(err, IsNil)
+ c.Assert(count, Equals, 4)
+}
+
+func (s *TagSuite) TestTagIterError(c *C) {
+ iter, err := s.Storer.IterEncodedObjects(plumbing.TagObject)
+ c.Assert(err, IsNil)
+
+ i := NewTagIter(s.Storer, iter)
+ err = i.ForEach(func(t *Tag) error {
+ return fmt.Errorf("a random error")
+ })
+
+ c.Assert(err, NotNil)
+}
+
+func (s *TagSuite) TestTagEncodeDecodeIdempotent(c *C) {
+ ts, err := time.Parse(time.RFC3339, "2006-01-02T15:04:05-07:00")
+ c.Assert(err, IsNil)
+ tags := []*Tag{
+ {
+ Name: "foo",
+ Tagger: Signature{Name: "Foo", Email: "foo@example.local", When: ts},
+ Message: "Message\n\nFoo\nBar\nBaz\n\n",
+ TargetType: plumbing.BlobObject,
+ Target: plumbing.NewHash("b029517f6300c2da0f4b651b8642506cd6aaf45d"),
+ },
+ {
+ Name: "foo",
+ Tagger: Signature{Name: "Foo", Email: "foo@example.local", When: ts},
+ TargetType: plumbing.BlobObject,
+ Target: plumbing.NewHash("b029517f6300c2da0f4b651b8642506cd6aaf45d"),
+ },
+ }
+ for _, tag := range tags {
+ obj := &plumbing.MemoryObject{}
+ err = tag.Encode(obj)
+ c.Assert(err, IsNil)
+ newTag := &Tag{}
+ err = newTag.Decode(obj)
+ c.Assert(err, IsNil)
+ tag.Hash = obj.Hash()
+ c.Assert(newTag, DeepEquals, tag)
+ }
+}
+
+func (s *TagSuite) TestString(c *C) {
+ tag := s.tag(c, plumbing.NewHash("b742a2a9fa0afcfa9a6fad080980fbc26b007c69"))
+ c.Assert(tag.String(), Equals, ""+
+ "tag annotated-tag\n"+
+ "Tagger: Máximo Cuadros <mcuadros@gmail.com>\n"+
+ "Date: Wed Sep 21 21:13:35 2016 +0200\n"+
+ "\n"+
+ "example annotated tag\n"+
+ "\n"+
+ "commit f7b877701fbf855b44c0a9e86f3fdce2c298b07f\n"+
+ "Author: Máximo Cuadros <mcuadros@gmail.com>\n"+
+ "Date: Wed Sep 21 21:10:52 2016 +0200\n"+
+ "\n"+
+ " initial\n"+
+ "\n",
+ )
+}
diff --git a/plumbing/object/tree.go b/plumbing/object/tree.go
new file mode 100644
index 0000000..7a8c1a3
--- /dev/null
+++ b/plumbing/object/tree.go
@@ -0,0 +1,449 @@
+package object
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "path"
+ "strconv"
+ "strings"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/utils/ioutil"
+)
+
+const (
+ maxTreeDepth = 1024
+ startingStackSize = 8
+ submoduleMode = 0160000
+ directoryMode = 0040000
+)
+
+// New errors defined by this package.
+var (
+ ErrMaxTreeDepth = errors.New("maximum tree depth exceeded")
+ ErrFileNotFound = errors.New("file not found")
+)
+
+// Tree is basically like a directory - it references a bunch of other trees
+// and/or blobs (i.e. files and sub-directories)
+type Tree struct {
+ Entries []TreeEntry
+ Hash plumbing.Hash
+
+ s storer.EncodedObjectStorer
+ m map[string]*TreeEntry
+}
+
+// GetTree gets a tree from an object storer and decodes it.
+func GetTree(s storer.EncodedObjectStorer, h plumbing.Hash) (*Tree, error) {
+ o, err := s.EncodedObject(plumbing.TreeObject, h)
+ if err != nil {
+ return nil, err
+ }
+
+ return DecodeTree(s, o)
+}
+
+// DecodeTree decodes an encoded object into a *Tree and associates it to the
+// given object storer.
+func DecodeTree(s storer.EncodedObjectStorer, o plumbing.EncodedObject) (*Tree, error) {
+ t := &Tree{s: s}
+ if err := t.Decode(o); err != nil {
+ return nil, err
+ }
+
+ return t, nil
+}
+
+// TreeEntry represents an entry in a Tree: a file or a sub-tree (directory).
+type TreeEntry struct {
+ Name string
+ Mode os.FileMode
+ Hash plumbing.Hash
+}
+
+// File returns the File identified by the `path` argument.
+// The path is interpreted as relative to the tree receiver.
+func (t *Tree) File(path string) (*File, error) {
+ e, err := t.findEntry(path)
+ if err != nil {
+ return nil, ErrFileNotFound
+ }
+
+ blob, err := GetBlob(t.s, e.Hash)
+ if err != nil {
+ return nil, err
+ }
+
+ return NewFile(path, e.Mode, blob), nil
+}
+
+// TreeEntryFile returns the *File for a given *TreeEntry.
+func (t *Tree) TreeEntryFile(e *TreeEntry) (*File, error) {
+ blob, err := GetBlob(t.s, e.Hash)
+ if err != nil {
+ return nil, err
+ }
+
+ return NewFile(e.Name, e.Mode, blob), nil
+}
+
+func (t *Tree) findEntry(path string) (*TreeEntry, error) {
+ pathParts := strings.Split(path, "/")
+
+ var tree *Tree
+ var err error
+ for tree = t; len(pathParts) > 1; pathParts = pathParts[1:] {
+ if tree, err = tree.dir(pathParts[0]); err != nil {
+ return nil, err
+ }
+ }
+
+ return tree.entry(pathParts[0])
+}
+
+var errDirNotFound = errors.New("directory not found")
+
+func (t *Tree) dir(baseName string) (*Tree, error) {
+ entry, err := t.entry(baseName)
+ if err != nil {
+ return nil, errDirNotFound
+ }
+
+ obj, err := t.s.EncodedObject(plumbing.TreeObject, entry.Hash)
+ if err != nil {
+ return nil, err
+ }
+
+ tree := &Tree{s: t.s}
+ tree.Decode(obj)
+
+ return tree, nil
+}
+
+var errEntryNotFound = errors.New("entry not found")
+
+func (t *Tree) entry(baseName string) (*TreeEntry, error) {
+ if t.m == nil {
+ t.buildMap()
+ }
+ entry, ok := t.m[baseName]
+ if !ok {
+ return nil, errEntryNotFound
+ }
+
+ return entry, nil
+}
+
+// Files returns a FileIter that allows iterating over the files in the Tree.
+func (t *Tree) Files() *FileIter {
+ return NewFileIter(t.s, t)
+}
+
+// ID returns the object ID of the tree. The returned value will always match
+// the current value of Tree.Hash.
+//
+// ID is present to fulfill the Object interface.
+func (t *Tree) ID() plumbing.Hash {
+ return t.Hash
+}
+
+// Type returns the type of object. It always returns plumbing.TreeObject.
+func (t *Tree) Type() plumbing.ObjectType {
+ return plumbing.TreeObject
+}
+
+// Decode transforms a plumbing.EncodedObject into a Tree struct.
+func (t *Tree) Decode(o plumbing.EncodedObject) (err error) {
+ if o.Type() != plumbing.TreeObject {
+ return ErrUnsupportedObject
+ }
+
+ t.Hash = o.Hash()
+ if o.Size() == 0 {
+ return nil
+ }
+
+ t.Entries = nil
+ t.m = nil
+
+ reader, err := o.Reader()
+ if err != nil {
+ return err
+ }
+ defer ioutil.CheckClose(reader, &err)
+
+ r := bufio.NewReader(reader)
+ for {
+ mode, err := r.ReadString(' ')
+ if err != nil {
+ if err == io.EOF {
+ break
+ }
+
+ return err
+ }
+
+ fm, err := t.decodeFileMode(mode[:len(mode)-1])
+ if err != nil && err != io.EOF {
+ return err
+ }
+
+ name, err := r.ReadString(0)
+ if err != nil && err != io.EOF {
+ return err
+ }
+
+ var hash plumbing.Hash
+ if _, err = io.ReadFull(r, hash[:]); err != nil {
+ return err
+ }
+
+ baseName := name[:len(name)-1]
+ t.Entries = append(t.Entries, TreeEntry{
+ Hash: hash,
+ Mode: fm,
+ Name: baseName,
+ })
+ }
+
+ return nil
+}
+
+func (t *Tree) decodeFileMode(mode string) (os.FileMode, error) {
+ fm, err := strconv.ParseInt(mode, 8, 32)
+ if err != nil && err != io.EOF {
+ return 0, err
+ }
+
+ m := os.FileMode(fm)
+ switch fm {
+ case 0040000: //tree
+ m = m | os.ModeDir
+ case 0120000: //symlink
+ m = m | os.ModeSymlink
+ }
+
+ return m, nil
+}
+
+// Encode transforms a Tree into a plumbing.EncodedObject.
+func (t *Tree) Encode(o plumbing.EncodedObject) error {
+ o.SetType(plumbing.TreeObject)
+ w, err := o.Writer()
+ if err != nil {
+ return err
+ }
+
+ var size int
+ defer ioutil.CheckClose(w, &err)
+ for _, entry := range t.Entries {
+ n, err := fmt.Fprintf(w, "%o %s", entry.Mode, entry.Name)
+ if err != nil {
+ return err
+ }
+
+ size += n
+ n, err = w.Write([]byte{0x00})
+ if err != nil {
+ return err
+ }
+
+ size += n
+ n, err = w.Write([]byte(entry.Hash[:]))
+ if err != nil {
+ return err
+ }
+ size += n
+ }
+
+ o.SetSize(int64(size))
+ return err
+}
+
+func (t *Tree) buildMap() {
+ t.m = make(map[string]*TreeEntry)
+ for i := 0; i < len(t.Entries); i++ {
+ t.m[t.Entries[i].Name] = &t.Entries[i]
+ }
+}
+
+// treeEntryIter facilitates iterating through the TreeEntry objects in a Tree.
+type treeEntryIter struct {
+ t *Tree
+ pos int
+}
+
+func (iter *treeEntryIter) Next() (TreeEntry, error) {
+ if iter.pos >= len(iter.t.Entries) {
+ return TreeEntry{}, io.EOF
+ }
+ iter.pos++
+ return iter.t.Entries[iter.pos-1], nil
+}
+
+// TreeWalker provides a means of walking through all of the entries in a Tree.
+type TreeWalker struct {
+ stack []treeEntryIter
+ base string
+ recursive bool
+
+ s storer.EncodedObjectStorer
+ t *Tree
+}
+
+// NewTreeWalker returns a new TreeWalker for the given tree.
+//
+// It is the caller's responsibility to call Close() when finished with the
+// tree walker.
+func NewTreeWalker(t *Tree, recursive bool) *TreeWalker {
+ stack := make([]treeEntryIter, 0, startingStackSize)
+ stack = append(stack, treeEntryIter{t, 0})
+
+ return &TreeWalker{
+ stack: stack,
+ recursive: recursive,
+
+ s: t.s,
+ t: t,
+ }
+}
+
+// Next returns the next object from the tree. Objects are returned in order
+// and subtrees are included. After the last object has been returned further
+// calls to Next() will return io.EOF.
+//
+// In the current implementation any objects which cannot be found in the
+// underlying repository will be skipped automatically. It is possible that this
+// may change in future versions.
+func (w *TreeWalker) Next() (name string, entry TreeEntry, err error) {
+ var obj Object
+ for {
+ current := len(w.stack) - 1
+ if current < 0 {
+ // Nothing left on the stack so we're finished
+ err = io.EOF
+ return
+ }
+
+ if current > maxTreeDepth {
+ // We're probably following bad data or some self-referencing tree
+ err = ErrMaxTreeDepth
+ return
+ }
+
+ entry, err = w.stack[current].Next()
+ if err == io.EOF {
+ // Finished with the current tree, move back up to the parent
+ w.stack = w.stack[:current]
+ w.base, _ = path.Split(w.base)
+ w.base = path.Clean(w.base) // Remove trailing slash
+ continue
+ }
+
+ if err != nil {
+ return
+ }
+
+ if entry.Mode == submoduleMode {
+ err = nil
+ continue
+ }
+
+ if entry.Mode.IsDir() {
+ obj, err = GetTree(w.s, entry.Hash)
+ }
+
+ name = path.Join(w.base, entry.Name)
+
+ if err != nil {
+ err = io.EOF
+ return
+ }
+
+ break
+ }
+
+ if !w.recursive {
+ return
+ }
+
+ if t, ok := obj.(*Tree); ok {
+ w.stack = append(w.stack, treeEntryIter{t, 0})
+ w.base = path.Join(w.base, entry.Name)
+ }
+
+ return
+}
+
+// Tree returns the tree that the tree walker most recently operated on.
+func (w *TreeWalker) Tree() *Tree {
+ current := len(w.stack) - 1
+ if w.stack[current].pos == 0 {
+ current--
+ }
+
+ if current < 0 {
+ return nil
+ }
+
+ return w.stack[current].t
+}
+
+// Close releases any resources used by the TreeWalker.
+func (w *TreeWalker) Close() {
+ w.stack = nil
+}
+
+// TreeIter provides an iterator for a set of trees.
+type TreeIter struct {
+ storer.EncodedObjectIter
+ s storer.EncodedObjectStorer
+}
+
+// NewTreeIter returns a TreeIter for the given object storer and underlying
+// object iterator.
+//
+// The returned TreeIter will automatically skip over non-tree objects.
+func NewTreeIter(s storer.EncodedObjectStorer, iter storer.EncodedObjectIter) *TreeIter {
+ return &TreeIter{iter, s}
+}
+
+// Next moves the iterator to the next tree and returns a pointer to it. If it
+// has reached the end of the set it will return io.EOF.
+func (iter *TreeIter) Next() (*Tree, error) {
+ for {
+ obj, err := iter.EncodedObjectIter.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ if obj.Type() != plumbing.TreeObject {
+ continue
+ }
+
+ return DecodeTree(iter.s, obj)
+ }
+}
+
+// ForEach calls the cb function for each tree contained in this iter until
+// an error happens or the end of the iter is reached. If ErrStop is sent
+// the iteration is stopped but no error is returned. The iterator is closed.
+func (iter *TreeIter) ForEach(cb func(*Tree) error) error {
+ return iter.EncodedObjectIter.ForEach(func(obj plumbing.EncodedObject) error {
+ if obj.Type() != plumbing.TreeObject {
+ return nil
+ }
+
+ t, err := DecodeTree(iter.s, obj)
+ if err != nil {
+ return err
+ }
+
+ return cb(t)
+ })
+}
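
Similarly, a minimal sketch (illustrative, not part of the patch) of combining
GetTree with a recursive TreeWalker to list every non-directory entry reachable
from a tree. As above, listBlobs and the example package are hypothetical; the
walker API used here (NewTreeWalker, Next, Close) is the one defined in the
file above.

package example

import (
	"fmt"
	"io"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
)

// listBlobs resolves the tree at h and walks it recursively, printing the
// path and hash of every entry that is not a sub-tree.
func listBlobs(s storer.EncodedObjectStorer, h plumbing.Hash) error {
	tree, err := object.GetTree(s, h)
	if err != nil {
		return err
	}

	walker := object.NewTreeWalker(tree, true)
	defer walker.Close()

	for {
		name, entry, err := walker.Next()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}

		// The walker also reports sub-tree entries; skip them here.
		if entry.Mode.IsDir() {
			continue
		}

		fmt.Printf("%s %s\n", name, entry.Hash)
	}
}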
diff --git a/plumbing/object/tree_test.go b/plumbing/object/tree_test.go
new file mode 100644
index 0000000..00601c1
--- /dev/null
+++ b/plumbing/object/tree_test.go
@@ -0,0 +1,1425 @@
+package object
+
+import (
+ "io"
+ "os"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+)
+
+type TreeSuite struct {
+ BaseObjectsSuite
+ Tree *Tree
+}
+
+var _ = Suite(&TreeSuite{})
+
+func (s *TreeSuite) SetUpSuite(c *C) {
+ s.BaseObjectsSuite.SetUpSuite(c)
+ hash := plumbing.NewHash("a8d315b2b1c615d43042c3a62402b8a54288cf5c")
+
+ s.Tree = s.tree(c, hash)
+}
+
+func (s *TreeSuite) TestDecode(c *C) {
+ c.Assert(s.Tree.Entries, HasLen, 8)
+ c.Assert(s.Tree.Entries[0].Name, Equals, ".gitignore")
+ c.Assert(s.Tree.Entries[0].Hash.String(), Equals, "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88")
+ c.Assert(s.Tree.Entries[0].Mode.String(), Equals, "-rw-r--r--")
+ c.Assert(s.Tree.Entries[4].Name, Equals, "go")
+ c.Assert(s.Tree.Entries[4].Hash.String(), Equals, "a39771a7651f97faf5c72e08224d857fc35133db")
+ c.Assert(s.Tree.Entries[4].Mode.String(), Equals, "d---------")
+}
+
+func (s *TreeSuite) TestDecodeNonTree(c *C) {
+ hash := plumbing.NewHash("9a48f23120e880dfbe41f7c9b7b708e9ee62a492")
+ blob, err := s.Storer.EncodedObject(plumbing.BlobObject, hash)
+ c.Assert(err, IsNil)
+
+ tree := &Tree{}
+ err = tree.Decode(blob)
+ c.Assert(err, Equals, ErrUnsupportedObject)
+}
+
+func (s *TreeSuite) TestType(c *C) {
+ c.Assert(s.Tree.Type(), Equals, plumbing.TreeObject)
+}
+
+func (s *TreeSuite) TestFile(c *C) {
+ f, err := s.Tree.File("LICENSE")
+ c.Assert(err, IsNil)
+ c.Assert(f.Name, Equals, "LICENSE")
+}
+
+func (s *TreeSuite) TestFileNotFound(c *C) {
+ f, err := s.Tree.File("not-found")
+ c.Assert(f, IsNil)
+ c.Assert(err, Equals, ErrFileNotFound)
+}
+
+func (s *TreeSuite) TestFiles(c *C) {
+ var count int
+ err := s.Tree.Files().ForEach(func(f *File) error {
+ count++
+ return nil
+ })
+
+ c.Assert(err, IsNil)
+ c.Assert(count, Equals, 9)
+}
+
+// SortReadObject is a plumbing.EncodedObject implementation whose reader only
+// returns 6 bytes at a time. It simulates the conditions where a read returns
+// fewer bytes than requested, for example when reading a hash, which is bigger
+// than 6 bytes.
+type SortReadObject struct {
+ t plumbing.ObjectType
+ h plumbing.Hash
+ cont []byte
+ sz int64
+}
+
+func (o *SortReadObject) Hash() plumbing.Hash { return o.h }
+func (o *SortReadObject) Type() plumbing.ObjectType { return o.t }
+func (o *SortReadObject) SetType(t plumbing.ObjectType) { o.t = t }
+func (o *SortReadObject) Size() int64 { return o.sz }
+func (o *SortReadObject) SetSize(s int64) { o.sz = s }
+func (o *SortReadObject) Content() []byte { return o.cont }
+func (o *SortReadObject) Reader() (io.ReadCloser, error) {
+ return &SortReadCloser{pos: 0, data: o.cont}, nil
+}
+func (o *SortReadObject) Writer() (io.WriteCloser, error) { return o, nil }
+func (o *SortReadObject) Write(p []byte) (n int, err error) { return len(p), nil }
+func (o *SortReadObject) Close() error { return nil }
+
+// SortReadCloser is a ReadCloser that only returns 6 bytes at a time, to simulate incomplete reads.
+type SortReadCloser struct {
+ pos int
+ data []byte
+}
+
+func (o *SortReadCloser) Close() error { return nil }
+func (o *SortReadCloser) Read(p []byte) (int, error) {
+ if o.pos == len(o.data) {
+ return 0, io.EOF
+ }
+
+ sz := len(p)
+ remaining := len(o.data) - o.pos
+ if sz > 6 { // don't read more than 6 bytes at a time
+ sz = 6
+ }
+ if sz > remaining {
+ sz = remaining
+ }
+
+ src := o.data[o.pos : o.pos+sz]
+ nw := copy(p, src)
+ o.pos += nw
+
+ return nw, nil
+}
+
+func (s *TreeSuite) TestTreeDecodeEncodeIdempotent(c *C) {
+ trees := []*Tree{
+ {
+ Entries: []TreeEntry{
+ {"foo", os.FileMode(0), plumbing.NewHash("b029517f6300c2da0f4b651b8642506cd6aaf45d")},
+ {"bar", os.FileMode(0), plumbing.NewHash("c029517f6300c2da0f4b651b8642506cd6aaf45d")},
+ {"baz", os.FileMode(0), plumbing.NewHash("d029517f6300c2da0f4b651b8642506cd6aaf45d")},
+ },
+ },
+ }
+ for _, tree := range trees {
+ obj := &plumbing.MemoryObject{}
+ err := tree.Encode(obj)
+ c.Assert(err, IsNil)
+ newTree := &Tree{}
+ err = newTree.Decode(obj)
+ c.Assert(err, IsNil)
+ tree.Hash = obj.Hash()
+ c.Assert(newTree, DeepEquals, tree)
+ }
+}
+
+func (s *TreeSuite) TestTreeIter(c *C) {
+ encIter, err := s.Storer.IterEncodedObjects(plumbing.TreeObject)
+ c.Assert(err, IsNil)
+ iter := NewTreeIter(s.Storer, encIter)
+
+ trees := []*Tree{}
+ iter.ForEach(func(t *Tree) error {
+ t.s = nil
+ trees = append(trees, t)
+ return nil
+ })
+
+ c.Assert(len(trees) > 0, Equals, true)
+ iter.Close()
+
+ encIter, err = s.Storer.IterEncodedObjects(plumbing.TreeObject)
+ c.Assert(err, IsNil)
+ iter = NewTreeIter(s.Storer, encIter)
+
+ i := 0
+ for {
+ t, err := iter.Next()
+ if err == io.EOF {
+ break
+ }
+
+ t.s = nil
+ c.Assert(err, IsNil)
+ c.Assert(t, DeepEquals, trees[i])
+ i++
+ }
+
+ iter.Close()
+}
+
+func (s *TreeSuite) TestTreeWalkerNext(c *C) {
+ commit, err := GetCommit(s.Storer, plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
+ c.Assert(err, IsNil)
+ tree, err := commit.Tree()
+ c.Assert(err, IsNil)
+
+ walker := NewTreeWalker(tree, true)
+ for _, e := range treeWalkerExpects {
+ name, entry, err := walker.Next()
+ if err == io.EOF {
+ break
+ }
+
+ c.Assert(err, IsNil)
+ c.Assert(name, Equals, e.Path)
+ c.Assert(entry.Name, Equals, e.Name)
+ c.Assert(entry.Mode.String(), Equals, e.Mode)
+ c.Assert(entry.Hash.String(), Equals, e.Hash)
+
+ c.Assert(walker.Tree().ID().String(), Equals, e.Tree)
+ }
+}
+
+func (s *TreeSuite) TestTreeWalkerNextNonRecursive(c *C) {
+ commit := s.commit(c, plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
+ tree, err := commit.Tree()
+ c.Assert(err, IsNil)
+
+ var count int
+ walker := NewTreeWalker(tree, false)
+ for {
+ name, entry, err := walker.Next()
+ if err == io.EOF {
+ break
+ }
+
+ c.Assert(err, IsNil)
+ c.Assert(name, Not(Equals), "")
+ c.Assert(entry, NotNil)
+
+ c.Assert(walker.Tree().ID().String(), Equals, "a8d315b2b1c615d43042c3a62402b8a54288cf5c")
+
+ count++
+ }
+
+ c.Assert(count, Equals, 8)
+}
+
+var treeWalkerExpects = []struct {
+ Path, Mode, Name, Hash, Tree string
+}{{
+ Path: ".gitignore", Mode: "-rw-r--r--", Name: ".gitignore",
+ Hash: "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+}, {
+ Path: "CHANGELOG", Mode: "-rw-r--r--", Name: "CHANGELOG",
+ Hash: "d3ff53e0564a9f87d8e84b6e28e5060e517008aa", Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+}, {
+ Path: "LICENSE", Mode: "-rw-r--r--", Name: "LICENSE",
+ Hash: "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+}, {
+ Path: "binary.jpg", Mode: "-rw-r--r--", Name: "binary.jpg",
+ Hash: "d5c0f4ab811897cadf03aec358ae60d21f91c50d", Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+}, {
+ Path: "go", Mode: "d---------", Name: "go",
+ Hash: "a39771a7651f97faf5c72e08224d857fc35133db", Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+}, {
+ Path: "go/example.go", Mode: "-rw-r--r--", Name: "example.go",
+ Hash: "880cd14280f4b9b6ed3986d6671f907d7cc2a198", Tree: "a39771a7651f97faf5c72e08224d857fc35133db",
+}, {
+ Path: "json", Mode: "d---------", Name: "json",
+ Hash: "5a877e6a906a2743ad6e45d99c1793642aaf8eda", Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+}, {
+ Path: "json/long.json", Mode: "-rw-r--r--", Name: "long.json",
+ Hash: "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9", Tree: "5a877e6a906a2743ad6e45d99c1793642aaf8eda",
+}, {
+ Path: "json/short.json", Mode: "-rw-r--r--", Name: "short.json",
+ Hash: "c8f1d8c61f9da76f4cb49fd86322b6e685dba956", Tree: "5a877e6a906a2743ad6e45d99c1793642aaf8eda",
+}, {
+ Path: "php", Mode: "d---------", Name: "php",
+ Hash: "586af567d0bb5e771e49bdd9434f5e0fb76d25fa", Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+}, {
+ Path: "php/crappy.php", Mode: "-rw-r--r--", Name: "crappy.php",
+ Hash: "9a48f23120e880dfbe41f7c9b7b708e9ee62a492", Tree: "586af567d0bb5e771e49bdd9434f5e0fb76d25fa",
+}, {
+ Path: "vendor", Mode: "d---------", Name: "vendor",
+ Hash: "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b", Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+}, {
+ Path: "vendor/foo.go", Mode: "-rw-r--r--", Name: "foo.go",
+ Hash: "9dea2395f5403188298c1dabe8bdafe562c491e3", Tree: "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b",
+}}
+
+func entriesEquals(a, b []TreeEntry) bool {
+ if a == nil && b == nil {
+ return true
+ }
+
+ if a == nil || b == nil {
+ return false
+ }
+
+ if len(a) != len(b) {
+ return false
+ }
+
+ for i, v := range a {
+ if v != b[i] {
+ return false
+ }
+ }
+
+ return true
+}
+
+// When decoding a tree we were not checking the return value of read
+// when reading hashes. As a hash is quite small, it worked well nearly
+// all the time.
+//
+// I have found some examples of repos where the read is incomplete and
+// the tree decode fails, for example
+// http://github.com/sqlcipher/sqlcipher.git, object
+// 0ba19d22411289293ab5c012891529967d7c933e.
+//
+// This test is performed with that object but using a SortReadObject to
+// simulate incomplete reads on all platforms and operating systems.
+func (s *TreeSuite) TestTreeDecodeReadBug(c *C) {
+ cont := []byte{
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x61, 0x6c, 0x74,
+ 0x65, 0x72, 0x2e, 0x63, 0x0, 0xa4, 0x9d, 0x33, 0x49, 0xd7,
+ 0xe2, 0x3f, 0xb5, 0x81, 0x19, 0x4f, 0x4c, 0xb5, 0x9a, 0xc0,
+ 0xd5, 0x1b, 0x2, 0x1f, 0x78, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x2e,
+ 0x63, 0x0, 0x9a, 0x3e, 0x95, 0x97, 0xdb, 0xb, 0x3, 0x20,
+ 0x77, 0xc9, 0x1d, 0x96, 0x9d, 0x22, 0xc6, 0x27, 0x3f, 0x70,
+ 0x2a, 0xc, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x61,
+ 0x74, 0x74, 0x61, 0x63, 0x68, 0x2e, 0x63, 0x0, 0xb8, 0xe1,
+ 0x21, 0x99, 0xb5, 0x7d, 0xe8, 0x11, 0xea, 0xe0, 0xd0, 0x61,
+ 0x42, 0xd5, 0xac, 0x4f, 0xd4, 0x30, 0xb1, 0xd8, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x61, 0x75, 0x74, 0x68, 0x2e,
+ 0x63, 0x0, 0xd3, 0x8b, 0xb8, 0x36, 0xa7, 0x84, 0xfb, 0xfa,
+ 0xb6, 0xab, 0x7b, 0x3, 0xd4, 0xe6, 0xdd, 0x43, 0xed, 0xc4,
+ 0x1f, 0xa7, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x62,
+ 0x61, 0x63, 0x6b, 0x75, 0x70, 0x2e, 0x63, 0x0, 0x25, 0x2f,
+ 0x61, 0xcf, 0xca, 0xa8, 0xfc, 0xf3, 0x13, 0x7e, 0x8, 0xed,
+ 0x68, 0x47, 0xdc, 0xfe, 0x1d, 0xc1, 0xde, 0x54, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x62, 0x69, 0x74, 0x76, 0x65,
+ 0x63, 0x2e, 0x63, 0x0, 0x52, 0x18, 0x4a, 0xa9, 0x64, 0xce,
+ 0x18, 0x98, 0xf3, 0x5d, 0x1b, 0x3d, 0x87, 0x87, 0x1c, 0x2d,
+ 0xe, 0xf4, 0xc5, 0x3d, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x62, 0x74, 0x6d, 0x75, 0x74, 0x65, 0x78, 0x2e, 0x63,
+ 0x0, 0xd8, 0x7d, 0x4d, 0x5f, 0xee, 0xb6, 0x30, 0x7a, 0xec,
+ 0xdc, 0x9a, 0x83, 0x11, 0x14, 0x89, 0xab, 0x30, 0xc6, 0x78,
+ 0xc3, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x62, 0x74,
+ 0x72, 0x65, 0x65, 0x2e, 0x63, 0x0, 0x3c, 0xa6, 0x5, 0x83,
+ 0xe3, 0xc8, 0xe3, 0x12, 0x0, 0xf9, 0x73, 0xe0, 0xe9, 0xc4,
+ 0x53, 0x62, 0x58, 0xb2, 0x64, 0x39, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x62, 0x74, 0x72, 0x65, 0x65, 0x2e, 0x68,
+ 0x0, 0xac, 0xe0, 0xf8, 0xcd, 0x21, 0x77, 0x70, 0xa2, 0xf6,
+ 0x6b, 0x2e, 0xb8, 0x71, 0xbb, 0xc5, 0xfd, 0xc6, 0xfc, 0x2b,
+ 0x68, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x62, 0x74,
+ 0x72, 0x65, 0x65, 0x49, 0x6e, 0x74, 0x2e, 0x68, 0x0, 0xce,
+ 0x3c, 0x54, 0x93, 0xf8, 0xca, 0xd0, 0xbc, 0x54, 0x8a, 0xe8,
+ 0xe4, 0x4e, 0x51, 0x28, 0x31, 0xd8, 0xfa, 0xc4, 0x31, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x62, 0x75, 0x69, 0x6c,
+ 0x64, 0x2e, 0x63, 0x0, 0x3c, 0x91, 0xcd, 0xcf, 0xdb, 0x7b,
+ 0x1, 0x7c, 0xbc, 0x2d, 0x5c, 0x29, 0x57, 0x1a, 0x98, 0x27,
+ 0xd, 0xe0, 0x71, 0xe6, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2e,
+ 0x63, 0x0, 0xd4, 0xc, 0x65, 0xcb, 0x92, 0x45, 0x80, 0x29,
+ 0x6a, 0xd0, 0x69, 0xa0, 0x4b, 0xf9, 0xc9, 0xe9, 0x53, 0x4e,
+ 0xca, 0xa7, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x63,
+ 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x63, 0x0,
+ 0x9e, 0x91, 0x40, 0x8, 0x5c, 0x0, 0x46, 0xed, 0x3b, 0xf6,
+ 0xf4, 0x48, 0x52, 0x20, 0x69, 0x2d, 0xca, 0x17, 0x43, 0xc5,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x63, 0x72, 0x79,
+ 0x70, 0x74, 0x6f, 0x2e, 0x63, 0x0, 0x25, 0x51, 0xe6, 0xba,
+ 0x2, 0x39, 0xf8, 0x5a, 0x35, 0x77, 0x96, 0xa8, 0xdd, 0xa8,
+ 0xca, 0x3e, 0x29, 0x70, 0x93, 0xf8, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2e,
+ 0x68, 0x0, 0xf7, 0x1f, 0x53, 0x2c, 0xdc, 0x44, 0x8f, 0xa,
+ 0x1d, 0xd5, 0xc6, 0xef, 0xf5, 0xfb, 0xd3, 0x3a, 0x91, 0x55,
+ 0xaa, 0x97, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x63,
+ 0x72, 0x79, 0x70, 0x74, 0x6f, 0x5f, 0x63, 0x63, 0x2e, 0x63,
+ 0x0, 0x53, 0x7d, 0xf7, 0xe3, 0xb3, 0x6a, 0xb5, 0xcf, 0xdd,
+ 0x6f, 0xca, 0x40, 0x28, 0xeb, 0xca, 0xe1, 0x86, 0x87, 0xd6,
+ 0x4d, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x63, 0x72,
+ 0x79, 0x70, 0x74, 0x6f, 0x5f, 0x69, 0x6d, 0x70, 0x6c, 0x2e,
+ 0x63, 0x0, 0xa5, 0x89, 0x27, 0xc7, 0x6e, 0xf6, 0x20, 0x56,
+ 0x77, 0xbe, 0x5c, 0x1a, 0x8e, 0x80, 0xc9, 0x83, 0x56, 0xb3,
+ 0xa9, 0xd3, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x63,
+ 0x72, 0x79, 0x70, 0x74, 0x6f, 0x5f, 0x6c, 0x69, 0x62, 0x74,
+ 0x6f, 0x6d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x2e, 0x63, 0x0,
+ 0x1a, 0x33, 0x83, 0xe0, 0x1, 0xa7, 0x21, 0x11, 0xc3, 0xf6,
+ 0x61, 0x92, 0x22, 0xb0, 0x65, 0xf4, 0xbd, 0x1, 0xb, 0xe1,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x63, 0x72, 0x79,
+ 0x70, 0x74, 0x6f, 0x5f, 0x6f, 0x70, 0x65, 0x6e, 0x73, 0x73,
+ 0x6c, 0x2e, 0x63, 0x0, 0xd0, 0x19, 0x81, 0x3b, 0x47, 0x6c,
+ 0x52, 0xd0, 0x20, 0xe2, 0xc0, 0xac, 0xd5, 0x24, 0xe9, 0xea,
+ 0x3d, 0xf, 0xb9, 0xfe, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x63, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x63, 0x0, 0x60,
+ 0x59, 0x5f, 0xf8, 0x8d, 0x92, 0xf7, 0x8, 0x26, 0x4, 0xfb,
+ 0xd9, 0xdf, 0x9a, 0xfe, 0xa1, 0x6a, 0xe8, 0x6f, 0xf, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x64, 0x61, 0x74, 0x65,
+ 0x2e, 0x63, 0x0, 0x75, 0x8d, 0xd7, 0xc8, 0x9b, 0xca, 0x39,
+ 0x37, 0xa9, 0xd, 0x70, 0x6e, 0xa9, 0x82, 0xce, 0x3a, 0xcf,
+ 0x11, 0xd1, 0x83, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x63, 0x0, 0x63,
+ 0x4e, 0x11, 0x55, 0x63, 0xae, 0x12, 0xba, 0x65, 0x58, 0xcc,
+ 0xc5, 0x12, 0xae, 0xd6, 0x31, 0xc0, 0x66, 0xba, 0xd8, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x65, 0x78, 0x70, 0x72,
+ 0x2e, 0x63, 0x0, 0x66, 0x3, 0x97, 0xe0, 0x78, 0xae, 0x48,
+ 0xb2, 0xe7, 0x17, 0x5e, 0x33, 0x85, 0x67, 0x78, 0x19, 0x72,
+ 0x2d, 0xdd, 0x6c, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x66, 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x63, 0x0, 0xc3, 0x2,
+ 0x8c, 0x4f, 0x93, 0x6e, 0xdf, 0x96, 0x71, 0x2d, 0xbe, 0x73,
+ 0xa0, 0x76, 0x62, 0xf0, 0xa2, 0x6b, 0x1d, 0xa, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x66, 0x6b, 0x65, 0x79, 0x2e,
+ 0x63, 0x0, 0xac, 0x35, 0xbc, 0x19, 0x4c, 0xde, 0xb1, 0x27,
+ 0x98, 0x9b, 0x9, 0x40, 0x35, 0xce, 0xe0, 0x6f, 0x57, 0x37,
+ 0x6f, 0x5e, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x66,
+ 0x75, 0x6e, 0x63, 0x2e, 0x63, 0x0, 0xc0, 0x2f, 0x9, 0x6a,
+ 0xda, 0xd5, 0xbc, 0xe9, 0xac, 0x83, 0xd3, 0x5f, 0xf, 0x46,
+ 0x9, 0xd6, 0xf6, 0xd4, 0x3b, 0xe5, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x2e,
+ 0x63, 0x0, 0x7b, 0x2, 0xcf, 0x21, 0x30, 0xe0, 0xd1, 0xa7,
+ 0xb8, 0x89, 0xd8, 0x44, 0xc, 0xcc, 0x82, 0x8, 0xf7, 0xb6,
+ 0x7b, 0xf9, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x68,
+ 0x61, 0x73, 0x68, 0x2e, 0x63, 0x0, 0xe8, 0x1d, 0xcf, 0x95,
+ 0xe4, 0x38, 0x48, 0xfa, 0x70, 0x86, 0xb7, 0xf7, 0x81, 0xc0,
+ 0x90, 0xad, 0xc7, 0xe6, 0xca, 0x8e, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x68, 0x61, 0x73, 0x68, 0x2e, 0x68, 0x0,
+ 0x82, 0xb7, 0xc5, 0x8c, 0x71, 0x9, 0xb, 0x54, 0x7e, 0x10,
+ 0x17, 0x42, 0xaa, 0x9, 0x51, 0x73, 0x9f, 0xf2, 0xee, 0xe7,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x68, 0x77, 0x74,
+ 0x69, 0x6d, 0x65, 0x2e, 0x68, 0x0, 0xb8, 0xbc, 0x5a, 0x29,
+ 0x5b, 0xe3, 0xfa, 0xc8, 0x35, 0x1f, 0xa9, 0xf0, 0x8a, 0x77,
+ 0x57, 0x9d, 0x59, 0xc9, 0xa8, 0xe4, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x2e,
+ 0x63, 0x0, 0x9a, 0x56, 0x61, 0xf5, 0x9a, 0x72, 0x95, 0x2b,
+ 0xe6, 0xc1, 0x67, 0xa0, 0xc2, 0xdb, 0x15, 0x9b, 0x91, 0xb7,
+ 0x1f, 0xae, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x6a,
+ 0x6f, 0x75, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x63, 0x0, 0xfe,
+ 0xd2, 0x7b, 0xe3, 0xe3, 0x80, 0x55, 0xd2, 0x20, 0x43, 0x95,
+ 0xcd, 0xe6, 0xff, 0xc9, 0x45, 0x89, 0xfb, 0xf5, 0xe8, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x6c, 0x65, 0x67, 0x61,
+ 0x63, 0x79, 0x2e, 0x63, 0x0, 0x94, 0x64, 0x9a, 0xe7, 0x5,
+ 0xab, 0x93, 0x85, 0x10, 0x8d, 0xd, 0x88, 0x7a, 0xf0, 0x75,
+ 0x92, 0x89, 0xfb, 0x23, 0xcb, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x6c, 0x65, 0x6d, 0x70, 0x61, 0x72, 0x2e, 0x63,
+ 0x0, 0x2a, 0xfa, 0xa6, 0xce, 0xa6, 0xd8, 0x29, 0x60, 0x2c,
+ 0x27, 0x86, 0xc1, 0xf8, 0xa3, 0x7f, 0x56, 0x7c, 0xf6, 0xfd,
+ 0x53, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x6c, 0x6f,
+ 0x61, 0x64, 0x65, 0x78, 0x74, 0x2e, 0x63, 0x0, 0xcd, 0xcf,
+ 0x6a, 0x93, 0xb8, 0xc4, 0xf, 0x91, 0x4b, 0x94, 0x24, 0xe,
+ 0xf1, 0x4c, 0xb4, 0xa3, 0xa, 0x37, 0xec, 0xa1, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d, 0x61, 0x69, 0x6e, 0x2e,
+ 0x63, 0x0, 0x39, 0xf6, 0x4, 0x21, 0xe6, 0x81, 0x27, 0x7c,
+ 0xc3, 0xdb, 0xa0, 0x9a, 0xbe, 0x7c, 0xf7, 0x90, 0xd5, 0x28,
+ 0xf5, 0xc3, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d,
+ 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x2e, 0x63, 0x0, 0x35, 0xa4,
+ 0x4e, 0x5f, 0x61, 0xc2, 0xe4, 0x4c, 0x48, 0x1c, 0x62, 0x51,
+ 0xbd, 0xa, 0xae, 0x7a, 0xcd, 0xa4, 0xde, 0xb, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d, 0x65, 0x6d, 0x30, 0x2e,
+ 0x63, 0x0, 0xd, 0xb, 0x66, 0x67, 0xd6, 0xa, 0x95, 0x5a,
+ 0x6, 0x96, 0xdf, 0x62, 0x89, 0xb4, 0x91, 0x78, 0x96, 0x93,
+ 0x43, 0xaa, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d,
+ 0x65, 0x6d, 0x31, 0x2e, 0x63, 0x0, 0x35, 0x78, 0x49, 0x6f,
+ 0x33, 0x3, 0x7, 0xb2, 0x31, 0xdf, 0xb5, 0x3c, 0xc, 0x2e,
+ 0x1c, 0x6b, 0x32, 0x3d, 0x79, 0x1e, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x6d, 0x65, 0x6d, 0x32, 0x2e, 0x63, 0x0,
+ 0x26, 0x44, 0x8e, 0xa8, 0xaa, 0xe0, 0x36, 0x6a, 0xf0, 0x54,
+ 0x1a, 0xfe, 0xa4, 0x79, 0xb, 0x42, 0xf4, 0xa6, 0x9b, 0x5a,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d, 0x65, 0x6d,
+ 0x33, 0x2e, 0x63, 0x0, 0x1a, 0x1b, 0x79, 0x1f, 0x28, 0xf8,
+ 0xcf, 0x3c, 0xe4, 0xf9, 0xa3, 0x5c, 0xda, 0xd7, 0xb7, 0x10,
+ 0x75, 0x68, 0xc7, 0x15, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x6d, 0x65, 0x6d, 0x35, 0x2e, 0x63, 0x0, 0x78, 0x3c,
+ 0xef, 0x61, 0x76, 0xc5, 0x9c, 0xbf, 0x30, 0x91, 0x46, 0x31,
+ 0x9, 0x5a, 0x1a, 0x54, 0xf4, 0xe4, 0x2e, 0x8, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d, 0x65, 0x6d, 0x6a, 0x6f,
+ 0x75, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x63, 0x0, 0x5, 0x72,
+ 0x59, 0x48, 0xf6, 0x5d, 0x42, 0x7b, 0x7, 0xf7, 0xf9, 0x29,
+ 0xac, 0xa3, 0xff, 0x22, 0x4b, 0x17, 0x53, 0xdf, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d, 0x75, 0x74, 0x65, 0x78,
+ 0x2e, 0x63, 0x0, 0xb5, 0x67, 0xe7, 0xc2, 0x7e, 0xf2, 0x4,
+ 0x10, 0x86, 0xaf, 0xe0, 0xf6, 0x96, 0x66, 0xe2, 0x7b, 0xf5,
+ 0x9, 0x8a, 0x59, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x6d, 0x75, 0x74, 0x65, 0x78, 0x2e, 0x68, 0x0, 0x9, 0x78,
+ 0x81, 0x22, 0x52, 0x77, 0x89, 0xa, 0x9c, 0x36, 0xc2, 0x4d,
+ 0x41, 0xf6, 0x11, 0x4d, 0x64, 0xc0, 0x6d, 0xb3, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d, 0x75, 0x74, 0x65, 0x78,
+ 0x5f, 0x6e, 0x6f, 0x6f, 0x70, 0x2e, 0x63, 0x0, 0x45, 0x6e,
+ 0x82, 0xa2, 0x5e, 0x27, 0x1b, 0x6, 0x14, 0xe7, 0xf4, 0xf8,
+ 0x3c, 0x22, 0x85, 0x53, 0xb7, 0xfa, 0x1, 0x58, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d, 0x75, 0x74, 0x65, 0x78,
+ 0x5f, 0x75, 0x6e, 0x69, 0x78, 0x2e, 0x63, 0x0, 0xec, 0xa7,
+ 0x29, 0x58, 0x31, 0xc2, 0xf0, 0xee, 0x48, 0xba, 0x54, 0xd0,
+ 0x62, 0x91, 0x4d, 0x6, 0xa1, 0xdd, 0x8e, 0xbe, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x6d, 0x75, 0x74, 0x65, 0x78,
+ 0x5f, 0x77, 0x33, 0x32, 0x2e, 0x63, 0x0, 0x27, 0xd1, 0xa,
+ 0xf5, 0xbd, 0x33, 0x1b, 0xdb, 0x97, 0x3f, 0x61, 0x45, 0xb7,
+ 0x4f, 0x72, 0xb6, 0x7, 0xcf, 0xc4, 0x6e, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x79,
+ 0x2e, 0x63, 0x0, 0xfc, 0xab, 0x5b, 0xfa, 0xf0, 0x19, 0x8,
+ 0xd3, 0xde, 0x93, 0xfa, 0x88, 0xb5, 0xea, 0xe9, 0xe9, 0x6c,
+ 0xa3, 0xc8, 0xe8, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x6f, 0x73, 0x2e, 0x63, 0x0, 0xbe, 0x2e, 0xa4, 0xcf, 0xc0,
+ 0x19, 0x59, 0x93, 0xa3, 0x40, 0xc9, 0x2, 0xae, 0xdd, 0xf1,
+ 0xbe, 0x4b, 0x8e, 0xd7, 0x3a, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x6f, 0x73, 0x2e, 0x68, 0x0, 0x7, 0xa, 0x2d,
+ 0xdd, 0x17, 0xf7, 0x71, 0xf9, 0x8f, 0xf8, 0xcc, 0xd6, 0xf0,
+ 0x33, 0xbd, 0xac, 0xc5, 0xe9, 0xf6, 0xc, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x6f, 0x73, 0x5f, 0x63, 0x6f, 0x6d,
+ 0x6d, 0x6f, 0x6e, 0x2e, 0x68, 0x0, 0xf6, 0xc3, 0xe7, 0xff,
+ 0x89, 0x46, 0x30, 0x86, 0x40, 0x18, 0x22, 0xf4, 0x81, 0xe7,
+ 0xe3, 0xb8, 0x7b, 0x2c, 0x78, 0xc7, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x6f, 0x73, 0x5f, 0x75, 0x6e, 0x69, 0x78,
+ 0x2e, 0x63, 0x0, 0xab, 0xc2, 0x3a, 0x45, 0x2e, 0x72, 0xf7,
+ 0x1c, 0x76, 0xaf, 0xa9, 0x98, 0x3c, 0x3a, 0xd9, 0xd4, 0x25,
+ 0x61, 0x6c, 0x6d, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x6f, 0x73, 0x5f, 0x77, 0x69, 0x6e, 0x2e, 0x63, 0x0, 0xae,
+ 0xb0, 0x88, 0x14, 0xb3, 0xda, 0xbe, 0x81, 0xb8, 0x4c, 0xda,
+ 0x91, 0x85, 0x82, 0xb0, 0xf, 0xfd, 0x86, 0xe4, 0x87, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x70, 0x61, 0x67, 0x65,
+ 0x72, 0x2e, 0x63, 0x0, 0x61, 0x72, 0x7f, 0xaa, 0x9c, 0xf,
+ 0x3d, 0x56, 0x62, 0x65, 0xbe, 0x7e, 0xec, 0x5b, 0x2a, 0x35,
+ 0xf6, 0xa4, 0xbc, 0x9f, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x70, 0x61, 0x67, 0x65, 0x72, 0x2e, 0x68, 0x0, 0x6f,
+ 0x65, 0x91, 0x36, 0xe2, 0x76, 0x7, 0x9d, 0xa4, 0x3a, 0x2e,
+ 0x39, 0xe1, 0xb6, 0x86, 0x37, 0xec, 0xad, 0xcf, 0x68, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x70, 0x61, 0x72, 0x73,
+ 0x65, 0x2e, 0x79, 0x0, 0x83, 0x10, 0xb2, 0x69, 0x89, 0xb0,
+ 0x5b, 0xed, 0x1e, 0x1b, 0x3, 0xda, 0x80, 0xf5, 0xc0, 0xa5,
+ 0x2e, 0x9a, 0xd1, 0xd2, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x70, 0x63, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x63, 0x0,
+ 0x48, 0x2a, 0x18, 0x8b, 0xee, 0x19, 0x91, 0xbc, 0x8a, 0xda,
+ 0xc9, 0x6a, 0x19, 0x3a, 0x53, 0xe5, 0x46, 0x2a, 0x8c, 0x10,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x70, 0x63, 0x61,
+ 0x63, 0x68, 0x65, 0x2e, 0x68, 0x0, 0xf4, 0xd4, 0xad, 0x71,
+ 0xc1, 0xd, 0x78, 0xc6, 0xda, 0xbd, 0xe2, 0x52, 0x15, 0xcd,
+ 0x41, 0x5a, 0x76, 0x1, 0x48, 0xca, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x70, 0x63, 0x61, 0x63, 0x68, 0x65, 0x31,
+ 0x2e, 0x63, 0x0, 0x41, 0x47, 0xd2, 0xef, 0xf5, 0x5b, 0xdd,
+ 0x9f, 0xf7, 0xc6, 0x86, 0xc, 0x60, 0x18, 0x10, 0x20, 0x16,
+ 0x6c, 0x5f, 0x50, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x70, 0x72, 0x61, 0x67, 0x6d, 0x61, 0x2e, 0x63, 0x0, 0x22,
+ 0x97, 0x71, 0x69, 0x61, 0x7d, 0x49, 0x22, 0xb3, 0x99, 0x3f,
+ 0x76, 0x9d, 0x90, 0xfa, 0x7b, 0xc4, 0x41, 0xea, 0x50, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x70, 0x72, 0x65, 0x70,
+ 0x61, 0x72, 0x65, 0x2e, 0x63, 0x0, 0xd7, 0x8d, 0x83, 0xcb,
+ 0xd8, 0x78, 0x97, 0xf5, 0x73, 0x30, 0x3f, 0x9f, 0x57, 0xab,
+ 0x8d, 0xe0, 0x24, 0xa6, 0xe3, 0xf8, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x66, 0x2e,
+ 0x63, 0x0, 0x9f, 0x68, 0xd2, 0x4, 0xff, 0xdc, 0x9f, 0x3d,
+ 0x42, 0x7f, 0x80, 0xa8, 0x23, 0x9a, 0x7f, 0xa3, 0xa9, 0x8a,
+ 0xec, 0xbd, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x72,
+ 0x61, 0x6e, 0x64, 0x6f, 0x6d, 0x2e, 0x63, 0x0, 0x23, 0x4e,
+ 0xbd, 0xf6, 0x58, 0xf4, 0x36, 0xcc, 0x7c, 0x68, 0xf0, 0x27,
+ 0xc4, 0x8b, 0xe, 0x1b, 0x9b, 0xa3, 0x4e, 0x98, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c,
+ 0x76, 0x65, 0x2e, 0x63, 0x0, 0x91, 0xef, 0xca, 0xa1, 0xa1,
+ 0x6b, 0xfc, 0x98, 0xfb, 0x35, 0xd8, 0x5c, 0xad, 0x15, 0x6b,
+ 0x93, 0x53, 0x3e, 0x4e, 0x6, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x65, 0x74, 0x2e, 0x63,
+ 0x0, 0x57, 0x61, 0xf9, 0x85, 0x50, 0xb1, 0x76, 0xcc, 0xe1,
+ 0x1d, 0xcb, 0xce, 0xc9, 0x38, 0x99, 0xa0, 0x75, 0xbb, 0x64,
+ 0xfd, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x73, 0x65,
+ 0x6c, 0x65, 0x63, 0x74, 0x2e, 0x63, 0x0, 0xf3, 0xf1, 0x49,
+ 0x9, 0x63, 0x95, 0x5b, 0x8e, 0xd0, 0xc9, 0xfe, 0x6e, 0x1e,
+ 0xec, 0x83, 0x6c, 0x1a, 0x52, 0x94, 0xb4, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x73, 0x68, 0x65, 0x6c, 0x6c, 0x2e,
+ 0x63, 0x0, 0x1b, 0xe2, 0x87, 0x1f, 0xed, 0x9a, 0x1f, 0xdf,
+ 0x1d, 0xf7, 0x19, 0x8e, 0x11, 0x25, 0x36, 0x0, 0xec, 0xba,
+ 0x76, 0xcc, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x73,
+ 0x71, 0x6c, 0x63, 0x69, 0x70, 0x68, 0x65, 0x72, 0x2e, 0x68,
+ 0x0, 0x82, 0x75, 0x30, 0x95, 0xcd, 0x17, 0x23, 0xc5, 0xff,
+ 0x4f, 0x11, 0x15, 0xe4, 0x97, 0x55, 0x91, 0xee, 0x34, 0xf5,
+ 0xce, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x73, 0x71,
+ 0x6c, 0x69, 0x74, 0x65, 0x2e, 0x68, 0x2e, 0x69, 0x6e, 0x0,
+ 0x66, 0x8, 0x82, 0x31, 0x75, 0xde, 0x5b, 0x6a, 0xd, 0x37,
+ 0x8f, 0xdb, 0xc, 0x38, 0x18, 0xb6, 0xab, 0x4f, 0xbf, 0x8e,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x73, 0x71, 0x6c,
+ 0x69, 0x74, 0x65, 0x33, 0x2e, 0x72, 0x63, 0x0, 0x96, 0x98,
+ 0x76, 0xda, 0x1e, 0x57, 0x14, 0x3d, 0xe0, 0xb4, 0xd1, 0xc7,
+ 0x62, 0x9f, 0xd3, 0x35, 0x6f, 0x2e, 0x1c, 0x96, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x73, 0x71, 0x6c, 0x69, 0x74,
+ 0x65, 0x33, 0x65, 0x78, 0x74, 0x2e, 0x68, 0x0, 0x92, 0x8b,
+ 0xb3, 0xba, 0xd9, 0xdd, 0x64, 0x3c, 0x30, 0x1d, 0xd2, 0xb0,
+ 0xac, 0x22, 0x28, 0x7a, 0x81, 0x28, 0x48, 0x84, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x73, 0x71, 0x6c, 0x69, 0x74,
+ 0x65, 0x49, 0x6e, 0x74, 0x2e, 0x68, 0x0, 0x59, 0x50, 0xf2,
+ 0x37, 0xd9, 0xf9, 0xf2, 0xd3, 0xef, 0x6b, 0xd8, 0xbe, 0x34,
+ 0x2d, 0xcf, 0x64, 0x89, 0x22, 0x51, 0x42, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x73, 0x71, 0x6c, 0x69, 0x74, 0x65,
+ 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x2e, 0x68, 0x0, 0xc7, 0xae,
+ 0xe5, 0x3c, 0xeb, 0xca, 0x94, 0xda, 0x51, 0xe7, 0x1a, 0x82,
+ 0x2e, 0xa5, 0xa6, 0xde, 0xb9, 0x3, 0x85, 0xdf, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x73, 0x74, 0x61, 0x74, 0x75,
+ 0x73, 0x2e, 0x63, 0x0, 0x28, 0x34, 0x9e, 0x6d, 0x3d, 0x20,
+ 0x88, 0xe0, 0x0, 0x3b, 0x76, 0xf8, 0xa, 0x89, 0x54, 0xfa,
+ 0xec, 0x59, 0x30, 0xba, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x2e, 0x63, 0x0, 0x26,
+ 0xbb, 0xfb, 0x4f, 0x45, 0x6c, 0x42, 0x98, 0x25, 0x29, 0xea,
+ 0x1a, 0x63, 0xa0, 0x17, 0x51, 0xdd, 0x3e, 0xe9, 0x5a, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x63, 0x6c, 0x73,
+ 0x71, 0x6c, 0x69, 0x74, 0x65, 0x2e, 0x63, 0x0, 0xf1, 0xbb,
+ 0x29, 0x21, 0xda, 0xc, 0x68, 0xa4, 0xf1, 0xc8, 0xe1, 0x5c,
+ 0xf5, 0x66, 0xb2, 0x33, 0xe9, 0x2a, 0x51, 0x9f, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x31,
+ 0x2e, 0x63, 0x0, 0xa6, 0x38, 0xe4, 0x80, 0xad, 0xdf, 0x14,
+ 0x43, 0x9c, 0xdf, 0xa4, 0xee, 0x16, 0x4d, 0xc3, 0x1b, 0x79,
+ 0xf8, 0xbc, 0xac, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x74, 0x65, 0x73, 0x74, 0x32, 0x2e, 0x63, 0x0, 0xd1, 0x30,
+ 0xe9, 0xd0, 0x1b, 0x70, 0x24, 0xa5, 0xec, 0x6d, 0x73, 0x5,
+ 0x92, 0xee, 0x4d, 0x1f, 0xb0, 0x2c, 0xfd, 0xb4, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x33,
+ 0x2e, 0x63, 0x0, 0xe3, 0xed, 0x31, 0xc, 0x81, 0x4, 0xfe,
+ 0x36, 0x21, 0xce, 0xbb, 0xf, 0x51, 0xd1, 0x1, 0x45, 0x1,
+ 0x8d, 0x4f, 0xac, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x74, 0x65, 0x73, 0x74, 0x34, 0x2e, 0x63, 0x0, 0xa6, 0x37,
+ 0x5c, 0x7c, 0xc4, 0x3, 0xf6, 0xc, 0xaa, 0xb7, 0xe9, 0x59,
+ 0x53, 0x3e, 0x3d, 0xb1, 0xff, 0x75, 0xa, 0xe4, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x35,
+ 0x2e, 0x63, 0x0, 0x30, 0x3d, 0x12, 0x5, 0xb2, 0x26, 0x28,
+ 0x42, 0x3d, 0x98, 0x6f, 0x71, 0xe2, 0x7c, 0x7c, 0xf7, 0x14,
+ 0xa7, 0x45, 0xa6, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x74, 0x65, 0x73, 0x74, 0x36, 0x2e, 0x63, 0x0, 0xc1, 0x51,
+ 0xea, 0x42, 0x98, 0x9b, 0xb, 0xe2, 0x4e, 0xe4, 0xb9, 0xa4,
+ 0xbe, 0x37, 0x8b, 0x4f, 0x63, 0x6d, 0xb6, 0x41, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x37,
+ 0x2e, 0x63, 0x0, 0x3c, 0xd4, 0xa2, 0x24, 0xd7, 0xe8, 0xe1,
+ 0x6b, 0xd7, 0xcb, 0xe4, 0x9e, 0x2d, 0x3e, 0x94, 0xce, 0x9b,
+ 0x17, 0xbd, 0x76, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x74, 0x65, 0x73, 0x74, 0x38, 0x2e, 0x63, 0x0, 0xc5, 0x73,
+ 0x93, 0x32, 0xd4, 0x6e, 0x57, 0x12, 0x1d, 0xa2, 0x7c, 0x3e,
+ 0x88, 0xfd, 0xe7, 0x5a, 0xeb, 0x87, 0x10, 0xf7, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x39,
+ 0x2e, 0x63, 0x0, 0xe5, 0x99, 0x3e, 0x8f, 0xf7, 0x8f, 0x61,
+ 0xc2, 0x43, 0x5b, 0x6f, 0x97, 0xa3, 0xb4, 0x63, 0xe2, 0x27,
+ 0xc7, 0x67, 0xac, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x74, 0x65, 0x73, 0x74, 0x5f, 0x61, 0x73, 0x79, 0x6e, 0x63,
+ 0x2e, 0x63, 0x0, 0xb0, 0xb9, 0x43, 0x18, 0x5b, 0xfc, 0x23,
+ 0xc1, 0x7f, 0xd0, 0x8f, 0x55, 0x76, 0x8c, 0xac, 0x12, 0xa9,
+ 0xf5, 0x69, 0x51, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x74, 0x65, 0x73, 0x74, 0x5f, 0x61, 0x75, 0x74, 0x6f, 0x65,
+ 0x78, 0x74, 0x2e, 0x63, 0x0, 0xb5, 0x1, 0x3f, 0x31, 0x73,
+ 0xa2, 0x17, 0x6e, 0x2d, 0x9f, 0xc, 0xaa, 0x99, 0x19, 0x30,
+ 0x36, 0xbf, 0xc3, 0x7e, 0x91, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x61, 0x63,
+ 0x6b, 0x75, 0x70, 0x2e, 0x63, 0x0, 0xe9, 0x67, 0x42, 0x4a,
+ 0x29, 0xf, 0x73, 0x8a, 0xec, 0xfd, 0xac, 0x57, 0x8e, 0x9b,
+ 0x87, 0xa4, 0xc4, 0xae, 0x8d, 0x7f, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x62, 0x74,
+ 0x72, 0x65, 0x65, 0x2e, 0x63, 0x0, 0xdb, 0x72, 0x88, 0x9b,
+ 0x2a, 0xfb, 0x62, 0x72, 0x82, 0x8d, 0xda, 0x86, 0x6d, 0xcc,
+ 0xf1, 0x22, 0xa4, 0x9a, 0x72, 0x99, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x63, 0x6f,
+ 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x63, 0x0, 0x53, 0x47, 0x27,
+ 0xa0, 0x80, 0x42, 0xb6, 0xca, 0xd6, 0x7e, 0x26, 0x7e, 0x87,
+ 0xb4, 0x3, 0xa4, 0x1a, 0x73, 0xb2, 0x99, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x64,
+ 0x65, 0x6d, 0x6f, 0x76, 0x66, 0x73, 0x2e, 0x63, 0x0, 0x63,
+ 0x76, 0x27, 0x7, 0x1d, 0x9e, 0x28, 0xf4, 0xb3, 0x45, 0x1b,
+ 0xbb, 0xdd, 0xf8, 0x8, 0xd1, 0xa9, 0x12, 0x0, 0xf8, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74,
+ 0x5f, 0x64, 0x65, 0x76, 0x73, 0x79, 0x6d, 0x2e, 0x63, 0x0,
+ 0x21, 0xf0, 0xf6, 0x84, 0xd8, 0x61, 0x11, 0x67, 0x70, 0xde,
+ 0xfc, 0xde, 0xcd, 0x53, 0x2b, 0xa3, 0xee, 0xab, 0xa9, 0x75,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73,
+ 0x74, 0x5f, 0x66, 0x73, 0x2e, 0x63, 0x0, 0x47, 0x8c, 0xad,
+ 0x80, 0xb1, 0x6a, 0x90, 0x9b, 0x23, 0xbd, 0x3, 0xc2, 0xda,
+ 0xd8, 0xb4, 0x49, 0xa7, 0x45, 0x87, 0xa1, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x66,
+ 0x75, 0x6e, 0x63, 0x2e, 0x63, 0x0, 0x6f, 0x9b, 0xb0, 0x3d,
+ 0xc8, 0x8a, 0x21, 0xd6, 0x58, 0xbf, 0x99, 0x99, 0xba, 0xf6,
+ 0x6d, 0xc1, 0xd5, 0x2e, 0xbc, 0x54, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x68, 0x65,
+ 0x78, 0x69, 0x6f, 0x2e, 0x63, 0x0, 0xb2, 0xb, 0x5c, 0xe7,
+ 0x30, 0xab, 0x7f, 0xa8, 0x0, 0xd2, 0xd0, 0xcc, 0x38, 0xc7,
+ 0x72, 0x75, 0x59, 0x3e, 0xbd, 0xbb, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x69, 0x6e,
+ 0x69, 0x74, 0x2e, 0x63, 0x0, 0xe3, 0x72, 0x4d, 0x8b, 0xe3,
+ 0x14, 0xdb, 0x9, 0xee, 0xa8, 0x4, 0xb, 0x9d, 0xdf, 0xc8,
+ 0xa8, 0xbe, 0xee, 0x22, 0x91, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x69, 0x6e, 0x74,
+ 0x61, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x63, 0x0, 0xf5, 0xc3,
+ 0xd9, 0xe4, 0x5, 0x9a, 0x16, 0x56, 0x7, 0x34, 0x7, 0xe4,
+ 0x3a, 0x92, 0x11, 0x79, 0x99, 0x69, 0x7b, 0x93, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f,
+ 0x69, 0x6e, 0x74, 0x61, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x68,
+ 0x0, 0x69, 0x13, 0x37, 0xd1, 0xae, 0xd6, 0x37, 0x15, 0xd6,
+ 0x2e, 0x76, 0x26, 0x6f, 0xf, 0x3b, 0x50, 0x8b, 0x1, 0xa,
+ 0x34, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65,
+ 0x73, 0x74, 0x5f, 0x6a, 0x6f, 0x75, 0x72, 0x6e, 0x61, 0x6c,
+ 0x2e, 0x63, 0x0, 0xe8, 0x70, 0x1a, 0x4e, 0xea, 0xdb, 0x8e,
+ 0xad, 0x16, 0x9d, 0x60, 0x6, 0x40, 0x7d, 0x54, 0xa8, 0x98,
+ 0x59, 0x2d, 0x70, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x74, 0x65, 0x73, 0x74, 0x5f, 0x6c, 0x6f, 0x61, 0x64, 0x65,
+ 0x78, 0x74, 0x2e, 0x63, 0x0, 0x11, 0x37, 0xe3, 0xa9, 0xaa,
+ 0xe9, 0x29, 0x6, 0xb8, 0x28, 0x9f, 0x6c, 0x3d, 0xaa, 0x61,
+ 0xf0, 0xd0, 0x70, 0xf5, 0x5a, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x6d, 0x61, 0x6c,
+ 0x6c, 0x6f, 0x63, 0x2e, 0x63, 0x0, 0xcf, 0x98, 0xa8, 0xfb,
+ 0x21, 0x82, 0xc0, 0xba, 0xf5, 0xa, 0xd5, 0x79, 0x79, 0xb6,
+ 0x75, 0xbb, 0x70, 0x7a, 0x93, 0xb0, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x6d, 0x75,
+ 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x78, 0x2e, 0x63, 0x0,
+ 0x62, 0x45, 0x41, 0xb3, 0x2a, 0x10, 0xd2, 0x1a, 0x2f, 0xd1,
+ 0xa, 0x35, 0xee, 0x66, 0x32, 0xbd, 0xac, 0x55, 0x2d, 0x41,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73,
+ 0x74, 0x5f, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65,
+ 0x78, 0x2e, 0x68, 0x0, 0xb7, 0xe1, 0xaf, 0xea, 0x5f, 0xd7,
+ 0x8b, 0x87, 0x58, 0x2, 0x65, 0xf8, 0x4c, 0x81, 0x61, 0x2c,
+ 0xbd, 0x2, 0x5b, 0xaf, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x6d, 0x75, 0x74, 0x65,
+ 0x78, 0x2e, 0x63, 0x0, 0xc9, 0xb4, 0xa2, 0x9a, 0xb7, 0x5c,
+ 0x77, 0xea, 0x5f, 0x36, 0xb5, 0x19, 0x32, 0x56, 0xd7, 0xf,
+ 0xe6, 0x58, 0xe, 0x95, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x6f, 0x6e, 0x65, 0x66,
+ 0x69, 0x6c, 0x65, 0x2e, 0x63, 0x0, 0x69, 0x86, 0x74, 0x41,
+ 0xb8, 0xcc, 0x9a, 0x62, 0x1a, 0xf3, 0x24, 0x13, 0xfc, 0x63,
+ 0xda, 0x80, 0x99, 0x37, 0x64, 0xf4, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x6f, 0x73,
+ 0x69, 0x6e, 0x73, 0x74, 0x2e, 0x63, 0x0, 0x53, 0x14, 0x33,
+ 0x31, 0x3e, 0xe3, 0x6c, 0x7, 0xeb, 0x21, 0xc0, 0x2f, 0x31,
+ 0x15, 0xcb, 0x7a, 0x37, 0x48, 0x6c, 0x79, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x70,
+ 0x63, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x63, 0x0, 0x8f, 0xcf,
+ 0xe7, 0xe2, 0x6e, 0x3f, 0xf1, 0x74, 0x96, 0xb8, 0x40, 0xf5,
+ 0xd6, 0x3c, 0x75, 0x78, 0x3a, 0xff, 0x81, 0x62, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f,
+ 0x71, 0x75, 0x6f, 0x74, 0x61, 0x2e, 0x63, 0x0, 0xe5, 0x90,
+ 0x99, 0x6c, 0xa4, 0xb8, 0x57, 0x4a, 0xb1, 0xe4, 0x18, 0x5d,
+ 0x57, 0x77, 0x56, 0x66, 0x4a, 0xd2, 0x49, 0x5f, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f,
+ 0x71, 0x75, 0x6f, 0x74, 0x61, 0x2e, 0x68, 0x0, 0x2d, 0x7,
+ 0x67, 0xa1, 0x9a, 0xb7, 0xc3, 0xa4, 0x21, 0xcd, 0xba, 0x6a,
+ 0x3, 0x49, 0x20, 0x43, 0x67, 0xc2, 0x2c, 0x81, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f,
+ 0x72, 0x74, 0x72, 0x65, 0x65, 0x2e, 0x63, 0x0, 0xf5, 0x4a,
+ 0xe9, 0xb0, 0x63, 0xbb, 0x73, 0x71, 0x2f, 0xcf, 0xc1, 0xc6,
+ 0x83, 0x2e, 0x2a, 0x50, 0xf6, 0x2a, 0x97, 0xe7, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f,
+ 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x63, 0x0, 0x12,
+ 0x64, 0x44, 0x67, 0x64, 0x7d, 0x51, 0x39, 0x4a, 0x1, 0xf9,
+ 0xfa, 0x60, 0x37, 0x62, 0x98, 0x18, 0x54, 0x66, 0xfd, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74,
+ 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x63, 0x0,
+ 0xed, 0x8, 0x18, 0xe6, 0xf6, 0x5f, 0x27, 0x28, 0x2d, 0xc7,
+ 0xb1, 0xc1, 0x90, 0xec, 0x18, 0x8c, 0x89, 0x33, 0x0, 0x2b,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73,
+ 0x74, 0x5f, 0x73, 0x71, 0x6c, 0x6c, 0x6f, 0x67, 0x2e, 0x63,
+ 0x0, 0x4a, 0xa6, 0x8b, 0x7c, 0x42, 0x93, 0x23, 0xb8, 0xee,
+ 0xbe, 0x6c, 0x9c, 0x2d, 0x7, 0xfc, 0x66, 0xd, 0x8d, 0x47,
+ 0xc9, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65,
+ 0x73, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x2e, 0x63, 0x0,
+ 0xd4, 0xc9, 0x2, 0xb5, 0xea, 0x11, 0x1a, 0xd5, 0x8a, 0x73,
+ 0x71, 0x12, 0xc2, 0x8f, 0x0, 0x38, 0x43, 0x4c, 0x85, 0xc0,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73,
+ 0x74, 0x5f, 0x73, 0x75, 0x70, 0x65, 0x72, 0x6c, 0x6f, 0x63,
+ 0x6b, 0x2e, 0x63, 0x0, 0x93, 0x6f, 0xca, 0xd0, 0xc5, 0x6f,
+ 0x6b, 0xc8, 0x58, 0x9, 0x74, 0x2f, 0x6a, 0xe1, 0xc1, 0xee,
+ 0xb8, 0xb7, 0xd2, 0xf1, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34,
+ 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x73, 0x79, 0x73, 0x63,
+ 0x61, 0x6c, 0x6c, 0x2e, 0x63, 0x0, 0x7c, 0x8, 0x73, 0xc1,
+ 0x6d, 0x84, 0x32, 0x2, 0xf3, 0xe, 0x2d, 0xb9, 0x45, 0x9f,
+ 0xa2, 0x99, 0x75, 0xea, 0x5e, 0x68, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x74, 0x63,
+ 0x6c, 0x76, 0x61, 0x72, 0x2e, 0x63, 0x0, 0x12, 0x19, 0x19,
+ 0xc, 0x3, 0x0, 0xfd, 0x5e, 0xc7, 0xa3, 0xc5, 0x84, 0x8,
+ 0xf3, 0x38, 0x43, 0xd2, 0xe, 0xee, 0x15, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x74,
+ 0x68, 0x72, 0x65, 0x61, 0x64, 0x2e, 0x63, 0x0, 0x2f, 0x93,
+ 0x63, 0xb7, 0x50, 0x1e, 0x51, 0x19, 0x81, 0xfe, 0x32, 0x83,
+ 0x1f, 0xf2, 0xe8, 0xfd, 0x2f, 0x30, 0xc4, 0x93, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f,
+ 0x76, 0x66, 0x73, 0x2e, 0x63, 0x0, 0xfc, 0xd5, 0x77, 0x43,
+ 0x9c, 0xfd, 0x6c, 0x72, 0xdd, 0xe4, 0x83, 0x58, 0x92, 0x14,
+ 0x20, 0xcf, 0x6e, 0xf1, 0xf8, 0x6d, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x76, 0x66,
+ 0x73, 0x74, 0x72, 0x61, 0x63, 0x65, 0x2e, 0x63, 0x0, 0xa,
+ 0xac, 0xc0, 0x1f, 0xe4, 0x2e, 0x77, 0xfe, 0xb8, 0x58, 0xe4,
+ 0xbe, 0xd0, 0xcb, 0x7e, 0x4, 0xa4, 0x35, 0xb2, 0x10, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x74, 0x65, 0x73, 0x74,
+ 0x5f, 0x77, 0x73, 0x64, 0x2e, 0x63, 0x0, 0x99, 0xe4, 0xa0,
+ 0x56, 0x58, 0x1f, 0x58, 0xf4, 0x53, 0x6f, 0xdb, 0x5a, 0x5d,
+ 0xf7, 0x5c, 0x74, 0x69, 0x8a, 0x81, 0x62, 0x31, 0x30, 0x30,
+ 0x36, 0x34, 0x34, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x69,
+ 0x7a, 0x65, 0x2e, 0x63, 0x0, 0xfa, 0xea, 0x5f, 0x26, 0xc7,
+ 0x9c, 0x5e, 0x18, 0x8f, 0xa8, 0x7f, 0x2f, 0xdf, 0x6f, 0xf7,
+ 0x6a, 0x7a, 0x60, 0x6, 0xc5, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x2e,
+ 0x63, 0x0, 0xf1, 0xff, 0x76, 0x6e, 0x20, 0x2a, 0x45, 0x18,
+ 0xec, 0x10, 0xe5, 0x27, 0x12, 0xc, 0xd3, 0xe, 0x83, 0xfb,
+ 0xd0, 0x34, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x75,
+ 0x70, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x63, 0x0, 0x3a, 0xb1,
+ 0xab, 0x2a, 0x4b, 0x65, 0xda, 0x3f, 0x19, 0x8c, 0x15, 0x84,
+ 0xd5, 0x4d, 0x36, 0xf1, 0x8c, 0xa1, 0x21, 0x4a, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x75, 0x74, 0x66, 0x2e, 0x63,
+ 0x0, 0x6d, 0x5b, 0x1b, 0xfe, 0x40, 0xc, 0x37, 0x48, 0xaa,
+ 0x70, 0xa3, 0xb2, 0xfd, 0x5e, 0xe, 0xac, 0x5f, 0xc0, 0x4d,
+ 0xe2, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x75, 0x74,
+ 0x69, 0x6c, 0x2e, 0x63, 0x0, 0xd8, 0x3a, 0x63, 0x1, 0x5f,
+ 0xd8, 0x7d, 0xcc, 0x4f, 0xb4, 0x41, 0x66, 0xfa, 0xbf, 0x2e,
+ 0x9b, 0xc9, 0x67, 0x1e, 0xb8, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x76, 0x61, 0x63, 0x75, 0x75, 0x6d, 0x2e, 0x63,
+ 0x0, 0x4a, 0xfb, 0x2c, 0xca, 0x64, 0xdd, 0x60, 0x76, 0x11,
+ 0x22, 0x2c, 0x7, 0x93, 0x2d, 0x12, 0xea, 0xcf, 0xa, 0x2c,
+ 0x22, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x76, 0x64,
+ 0x62, 0x65, 0x2e, 0x63, 0x0, 0xf3, 0x43, 0xe1, 0x3d, 0x4e,
+ 0x91, 0x78, 0x4b, 0x15, 0x88, 0x10, 0xc5, 0xb7, 0xd4, 0x46,
+ 0x84, 0xdf, 0xbf, 0xa2, 0xa5, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x76, 0x64, 0x62, 0x65, 0x2e, 0x68, 0x0, 0xfa,
+ 0x7b, 0x31, 0xb7, 0x27, 0xa, 0x90, 0xd4, 0xf6, 0x37, 0x36,
+ 0x5a, 0xfc, 0xc9, 0xbd, 0xa1, 0xd1, 0xb1, 0xe1, 0xd6, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x76, 0x64, 0x62, 0x65,
+ 0x49, 0x6e, 0x74, 0x2e, 0x68, 0x0, 0x3a, 0x5b, 0x40, 0x28,
+ 0xbb, 0xd6, 0xc9, 0x56, 0x10, 0xd7, 0xc, 0xce, 0x3, 0x69,
+ 0xdf, 0xcd, 0x60, 0x7a, 0xa9, 0x0, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x76, 0x64, 0x62, 0x65, 0x61, 0x70, 0x69,
+ 0x2e, 0x63, 0x0, 0x7c, 0x86, 0x1e, 0x2d, 0x47, 0x21, 0x8c,
+ 0x91, 0x63, 0x31, 0x77, 0x77, 0xc3, 0x7, 0x21, 0x99, 0xe9,
+ 0xb4, 0x2, 0x80, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x76, 0x64, 0x62, 0x65, 0x61, 0x75, 0x78, 0x2e, 0x63, 0x0,
+ 0x2c, 0x42, 0x69, 0xa5, 0x9e, 0x6d, 0xbc, 0xe8, 0x67, 0x1c,
+ 0x47, 0x4f, 0x34, 0x61, 0x90, 0xbe, 0x2a, 0xe, 0x18, 0x51,
+ 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x76, 0x64, 0x62,
+ 0x65, 0x62, 0x6c, 0x6f, 0x62, 0x2e, 0x63, 0x0, 0x2e, 0x8f,
+ 0xd8, 0xee, 0x74, 0x47, 0xe6, 0x46, 0x46, 0xe3, 0x49, 0x4b,
+ 0x4c, 0x4, 0x1d, 0x3a, 0x4a, 0xbb, 0x8, 0x85, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x76, 0x64, 0x62, 0x65, 0x6d,
+ 0x65, 0x6d, 0x2e, 0x63, 0x0, 0x8f, 0xc2, 0x22, 0xe2, 0xde,
+ 0x20, 0x50, 0x14, 0x50, 0xec, 0xea, 0x9d, 0x4e, 0xbf, 0xaa,
+ 0xc9, 0x81, 0x4a, 0xae, 0x59, 0x31, 0x30, 0x30, 0x36, 0x34,
+ 0x34, 0x20, 0x76, 0x64, 0x62, 0x65, 0x73, 0x6f, 0x72, 0x74,
+ 0x2e, 0x63, 0x0, 0xfd, 0xfc, 0x4a, 0x79, 0xdd, 0xc9, 0x6e,
+ 0x59, 0x9b, 0x1b, 0xe, 0xeb, 0xac, 0xbd, 0xb8, 0x45, 0xc6,
+ 0x38, 0x13, 0xb2, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20,
+ 0x76, 0x64, 0x62, 0x65, 0x74, 0x72, 0x61, 0x63, 0x65, 0x2e,
+ 0x63, 0x0, 0x35, 0x62, 0x77, 0xe8, 0xd2, 0x3b, 0xca, 0xdb,
+ 0x67, 0x6b, 0x59, 0xd1, 0xa4, 0xdc, 0xf8, 0x42, 0xfd, 0xc4,
+ 0xc9, 0x72, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x76,
+ 0x74, 0x61, 0x62, 0x2e, 0x63, 0x0, 0x95, 0x82, 0x2, 0xc3,
+ 0x1e, 0x24, 0x15, 0xb, 0x60, 0xf1, 0xa, 0x8a, 0xf, 0x74,
+ 0x41, 0xaf, 0xac, 0x3f, 0xbb, 0x1c, 0x31, 0x30, 0x30, 0x36,
+ 0x34, 0x34, 0x20, 0x77, 0x61, 0x6c, 0x2e, 0x63, 0x0, 0xe6,
+ 0x42, 0xea, 0x21, 0x5, 0xb5, 0xc5, 0x4a, 0xf3, 0x5, 0x88,
+ 0x9, 0x62, 0x69, 0xab, 0x75, 0xcb, 0xef, 0x8f, 0xf2, 0x31,
+ 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x77, 0x61, 0x6c, 0x2e,
+ 0x68, 0x0, 0x9, 0x25, 0x46, 0x35, 0x4b, 0x34, 0xc0, 0xab,
+ 0x3d, 0x20, 0x5, 0x6a, 0x7f, 0x8a, 0x8a, 0x52, 0xe4, 0xd0,
+ 0xb5, 0xf5, 0x31, 0x30, 0x30, 0x36, 0x34, 0x34, 0x20, 0x77,
+ 0x61, 0x6c, 0x6b, 0x65, 0x72, 0x2e, 0x63, 0x0, 0xe7, 0x1e,
+ 0xd2, 0xac, 0x48, 0x4c, 0x91, 0x6c, 0x1c, 0xc1, 0x0, 0x7e,
+ 0x5e, 0x5, 0xda, 0x47, 0x1c, 0xb4, 0x95, 0x99, 0x31, 0x30,
+ 0x30, 0x36, 0x34, 0x34, 0x20, 0x77, 0x68, 0x65, 0x72, 0x65,
+ 0x2e, 0x63, 0x0, 0xe6, 0x14, 0xf4, 0xa6, 0xd8, 0x64, 0xe7,
+ 0xe, 0xc4, 0x32, 0x8d, 0xb, 0xdb, 0x25, 0x4e, 0x3a, 0xc9,
+ 0xf0, 0xd2, 0x87,
+ }
+ obj := &SortReadObject{
+ t: plumbing.TreeObject,
+ h: plumbing.ZeroHash,
+ cont: cont,
+ sz: 5313,
+ }
+
+ expected := &Tree{
+ Entries: []TreeEntry{
+ {
+ Name: "alter.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xa4, 0x9d, 0x33, 0x49, 0xd7, 0xe2, 0x3f, 0xb5, 0x81, 0x19, 0x4f, 0x4c, 0xb5, 0x9a, 0xc0, 0xd5, 0x1b, 0x2, 0x1f, 0x78},
+ },
+ {
+ Name: "analyze.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x9a, 0x3e, 0x95, 0x97, 0xdb, 0xb, 0x3, 0x20, 0x77, 0xc9, 0x1d, 0x96, 0x9d, 0x22, 0xc6, 0x27, 0x3f, 0x70, 0x2a, 0xc},
+ },
+ {
+ Name: "attach.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xb8, 0xe1, 0x21, 0x99, 0xb5, 0x7d, 0xe8, 0x11, 0xea, 0xe0, 0xd0, 0x61, 0x42, 0xd5, 0xac, 0x4f, 0xd4, 0x30, 0xb1, 0xd8},
+ },
+ {
+ Name: "auth.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd3, 0x8b, 0xb8, 0x36, 0xa7, 0x84, 0xfb, 0xfa, 0xb6, 0xab, 0x7b, 0x3, 0xd4, 0xe6, 0xdd, 0x43, 0xed, 0xc4, 0x1f, 0xa7},
+ },
+ {
+ Name: "backup.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x25, 0x2f, 0x61, 0xcf, 0xca, 0xa8, 0xfc, 0xf3, 0x13, 0x7e, 0x8, 0xed, 0x68, 0x47, 0xdc, 0xfe, 0x1d, 0xc1, 0xde, 0x54},
+ },
+ {
+ Name: "bitvec.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x52, 0x18, 0x4a, 0xa9, 0x64, 0xce, 0x18, 0x98, 0xf3, 0x5d, 0x1b, 0x3d, 0x87, 0x87, 0x1c, 0x2d, 0xe, 0xf4, 0xc5, 0x3d},
+ },
+ {
+ Name: "btmutex.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd8, 0x7d, 0x4d, 0x5f, 0xee, 0xb6, 0x30, 0x7a, 0xec, 0xdc, 0x9a, 0x83, 0x11, 0x14, 0x89, 0xab, 0x30, 0xc6, 0x78, 0xc3},
+ },
+ {
+ Name: "btree.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x3c, 0xa6, 0x5, 0x83, 0xe3, 0xc8, 0xe3, 0x12, 0x0, 0xf9, 0x73, 0xe0, 0xe9, 0xc4, 0x53, 0x62, 0x58, 0xb2, 0x64, 0x39},
+ },
+ {
+ Name: "btree.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xac, 0xe0, 0xf8, 0xcd, 0x21, 0x77, 0x70, 0xa2, 0xf6, 0x6b, 0x2e, 0xb8, 0x71, 0xbb, 0xc5, 0xfd, 0xc6, 0xfc, 0x2b, 0x68},
+ },
+ {
+ Name: "btreeInt.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xce, 0x3c, 0x54, 0x93, 0xf8, 0xca, 0xd0, 0xbc, 0x54, 0x8a, 0xe8, 0xe4, 0x4e, 0x51, 0x28, 0x31, 0xd8, 0xfa, 0xc4, 0x31},
+ },
+ {
+ Name: "build.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x3c, 0x91, 0xcd, 0xcf, 0xdb, 0x7b, 0x1, 0x7c, 0xbc, 0x2d, 0x5c, 0x29, 0x57, 0x1a, 0x98, 0x27, 0xd, 0xe0, 0x71, 0xe6},
+ },
+ {
+ Name: "callback.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd4, 0xc, 0x65, 0xcb, 0x92, 0x45, 0x80, 0x29, 0x6a, 0xd0, 0x69, 0xa0, 0x4b, 0xf9, 0xc9, 0xe9, 0x53, 0x4e, 0xca, 0xa7},
+ },
+ {
+ Name: "complete.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x9e, 0x91, 0x40, 0x8, 0x5c, 0x0, 0x46, 0xed, 0x3b, 0xf6, 0xf4, 0x48, 0x52, 0x20, 0x69, 0x2d, 0xca, 0x17, 0x43, 0xc5},
+ },
+ {
+ Name: "crypto.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x25, 0x51, 0xe6, 0xba, 0x2, 0x39, 0xf8, 0x5a, 0x35, 0x77, 0x96, 0xa8, 0xdd, 0xa8, 0xca, 0x3e, 0x29, 0x70, 0x93, 0xf8},
+ },
+ {
+ Name: "crypto.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf7, 0x1f, 0x53, 0x2c, 0xdc, 0x44, 0x8f, 0xa, 0x1d, 0xd5, 0xc6, 0xef, 0xf5, 0xfb, 0xd3, 0x3a, 0x91, 0x55, 0xaa, 0x97},
+ },
+ {
+ Name: "crypto_cc.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x53, 0x7d, 0xf7, 0xe3, 0xb3, 0x6a, 0xb5, 0xcf, 0xdd, 0x6f, 0xca, 0x40, 0x28, 0xeb, 0xca, 0xe1, 0x86, 0x87, 0xd6, 0x4d},
+ },
+ {
+ Name: "crypto_impl.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xa5, 0x89, 0x27, 0xc7, 0x6e, 0xf6, 0x20, 0x56, 0x77, 0xbe, 0x5c, 0x1a, 0x8e, 0x80, 0xc9, 0x83, 0x56, 0xb3, 0xa9, 0xd3},
+ },
+ {
+ Name: "crypto_libtomcrypt.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x1a, 0x33, 0x83, 0xe0, 0x1, 0xa7, 0x21, 0x11, 0xc3, 0xf6, 0x61, 0x92, 0x22, 0xb0, 0x65, 0xf4, 0xbd, 0x1, 0xb, 0xe1},
+ },
+ {
+ Name: "crypto_openssl.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd0, 0x19, 0x81, 0x3b, 0x47, 0x6c, 0x52, 0xd0, 0x20, 0xe2, 0xc0, 0xac, 0xd5, 0x24, 0xe9, 0xea, 0x3d, 0xf, 0xb9, 0xfe},
+ },
+ {
+ Name: "ctime.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x60, 0x59, 0x5f, 0xf8, 0x8d, 0x92, 0xf7, 0x8, 0x26, 0x4, 0xfb, 0xd9, 0xdf, 0x9a, 0xfe, 0xa1, 0x6a, 0xe8, 0x6f, 0xf},
+ },
+ {
+ Name: "date.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x75, 0x8d, 0xd7, 0xc8, 0x9b, 0xca, 0x39, 0x37, 0xa9, 0xd, 0x70, 0x6e, 0xa9, 0x82, 0xce, 0x3a, 0xcf, 0x11, 0xd1, 0x83},
+ },
+ {
+ Name: "delete.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x63, 0x4e, 0x11, 0x55, 0x63, 0xae, 0x12, 0xba, 0x65, 0x58, 0xcc, 0xc5, 0x12, 0xae, 0xd6, 0x31, 0xc0, 0x66, 0xba, 0xd8},
+ },
+ {
+ Name: "expr.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x66, 0x3, 0x97, 0xe0, 0x78, 0xae, 0x48, 0xb2, 0xe7, 0x17, 0x5e, 0x33, 0x85, 0x67, 0x78, 0x19, 0x72, 0x2d, 0xdd, 0x6c},
+ },
+ {
+ Name: "fault.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xc3, 0x2, 0x8c, 0x4f, 0x93, 0x6e, 0xdf, 0x96, 0x71, 0x2d, 0xbe, 0x73, 0xa0, 0x76, 0x62, 0xf0, 0xa2, 0x6b, 0x1d, 0xa},
+ },
+ {
+ Name: "fkey.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xac, 0x35, 0xbc, 0x19, 0x4c, 0xde, 0xb1, 0x27, 0x98, 0x9b, 0x9, 0x40, 0x35, 0xce, 0xe0, 0x6f, 0x57, 0x37, 0x6f, 0x5e},
+ },
+ {
+ Name: "func.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xc0, 0x2f, 0x9, 0x6a, 0xda, 0xd5, 0xbc, 0xe9, 0xac, 0x83, 0xd3, 0x5f, 0xf, 0x46, 0x9, 0xd6, 0xf6, 0xd4, 0x3b, 0xe5},
+ },
+ {
+ Name: "global.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x7b, 0x2, 0xcf, 0x21, 0x30, 0xe0, 0xd1, 0xa7, 0xb8, 0x89, 0xd8, 0x44, 0xc, 0xcc, 0x82, 0x8, 0xf7, 0xb6, 0x7b, 0xf9},
+ },
+ {
+ Name: "hash.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe8, 0x1d, 0xcf, 0x95, 0xe4, 0x38, 0x48, 0xfa, 0x70, 0x86, 0xb7, 0xf7, 0x81, 0xc0, 0x90, 0xad, 0xc7, 0xe6, 0xca, 0x8e},
+ },
+ {
+ Name: "hash.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x82, 0xb7, 0xc5, 0x8c, 0x71, 0x9, 0xb, 0x54, 0x7e, 0x10, 0x17, 0x42, 0xaa, 0x9, 0x51, 0x73, 0x9f, 0xf2, 0xee, 0xe7},
+ },
+ {
+ Name: "hwtime.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xb8, 0xbc, 0x5a, 0x29, 0x5b, 0xe3, 0xfa, 0xc8, 0x35, 0x1f, 0xa9, 0xf0, 0x8a, 0x77, 0x57, 0x9d, 0x59, 0xc9, 0xa8, 0xe4},
+ },
+ {
+ Name: "insert.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x9a, 0x56, 0x61, 0xf5, 0x9a, 0x72, 0x95, 0x2b, 0xe6, 0xc1, 0x67, 0xa0, 0xc2, 0xdb, 0x15, 0x9b, 0x91, 0xb7, 0x1f, 0xae},
+ },
+ {
+ Name: "journal.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xfe, 0xd2, 0x7b, 0xe3, 0xe3, 0x80, 0x55, 0xd2, 0x20, 0x43, 0x95, 0xcd, 0xe6, 0xff, 0xc9, 0x45, 0x89, 0xfb, 0xf5, 0xe8},
+ },
+ {
+ Name: "legacy.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x94, 0x64, 0x9a, 0xe7, 0x5, 0xab, 0x93, 0x85, 0x10, 0x8d, 0xd, 0x88, 0x7a, 0xf0, 0x75, 0x92, 0x89, 0xfb, 0x23, 0xcb},
+ },
+ {
+ Name: "lempar.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x2a, 0xfa, 0xa6, 0xce, 0xa6, 0xd8, 0x29, 0x60, 0x2c, 0x27, 0x86, 0xc1, 0xf8, 0xa3, 0x7f, 0x56, 0x7c, 0xf6, 0xfd, 0x53},
+ },
+ {
+ Name: "loadext.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xcd, 0xcf, 0x6a, 0x93, 0xb8, 0xc4, 0xf, 0x91, 0x4b, 0x94, 0x24, 0xe, 0xf1, 0x4c, 0xb4, 0xa3, 0xa, 0x37, 0xec, 0xa1},
+ },
+ {
+ Name: "main.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x39, 0xf6, 0x4, 0x21, 0xe6, 0x81, 0x27, 0x7c, 0xc3, 0xdb, 0xa0, 0x9a, 0xbe, 0x7c, 0xf7, 0x90, 0xd5, 0x28, 0xf5, 0xc3},
+ },
+ {
+ Name: "malloc.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x35, 0xa4, 0x4e, 0x5f, 0x61, 0xc2, 0xe4, 0x4c, 0x48, 0x1c, 0x62, 0x51, 0xbd, 0xa, 0xae, 0x7a, 0xcd, 0xa4, 0xde, 0xb},
+ },
+ {
+ Name: "mem0.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd, 0xb, 0x66, 0x67, 0xd6, 0xa, 0x95, 0x5a, 0x6, 0x96, 0xdf, 0x62, 0x89, 0xb4, 0x91, 0x78, 0x96, 0x93, 0x43, 0xaa},
+ },
+ {
+ Name: "mem1.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x35, 0x78, 0x49, 0x6f, 0x33, 0x3, 0x7, 0xb2, 0x31, 0xdf, 0xb5, 0x3c, 0xc, 0x2e, 0x1c, 0x6b, 0x32, 0x3d, 0x79, 0x1e},
+ },
+ {
+ Name: "mem2.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x26, 0x44, 0x8e, 0xa8, 0xaa, 0xe0, 0x36, 0x6a, 0xf0, 0x54, 0x1a, 0xfe, 0xa4, 0x79, 0xb, 0x42, 0xf4, 0xa6, 0x9b, 0x5a},
+ },
+ {
+ Name: "mem3.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x1a, 0x1b, 0x79, 0x1f, 0x28, 0xf8, 0xcf, 0x3c, 0xe4, 0xf9, 0xa3, 0x5c, 0xda, 0xd7, 0xb7, 0x10, 0x75, 0x68, 0xc7, 0x15},
+ },
+ {
+ Name: "mem5.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x78, 0x3c, 0xef, 0x61, 0x76, 0xc5, 0x9c, 0xbf, 0x30, 0x91, 0x46, 0x31, 0x9, 0x5a, 0x1a, 0x54, 0xf4, 0xe4, 0x2e, 0x8},
+ },
+ {
+ Name: "memjournal.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x5, 0x72, 0x59, 0x48, 0xf6, 0x5d, 0x42, 0x7b, 0x7, 0xf7, 0xf9, 0x29, 0xac, 0xa3, 0xff, 0x22, 0x4b, 0x17, 0x53, 0xdf},
+ },
+ {
+ Name: "mutex.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xb5, 0x67, 0xe7, 0xc2, 0x7e, 0xf2, 0x4, 0x10, 0x86, 0xaf, 0xe0, 0xf6, 0x96, 0x66, 0xe2, 0x7b, 0xf5, 0x9, 0x8a, 0x59},
+ },
+ {
+ Name: "mutex.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x9, 0x78, 0x81, 0x22, 0x52, 0x77, 0x89, 0xa, 0x9c, 0x36, 0xc2, 0x4d, 0x41, 0xf6, 0x11, 0x4d, 0x64, 0xc0, 0x6d, 0xb3},
+ },
+ {
+ Name: "mutex_noop.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x45, 0x6e, 0x82, 0xa2, 0x5e, 0x27, 0x1b, 0x6, 0x14, 0xe7, 0xf4, 0xf8, 0x3c, 0x22, 0x85, 0x53, 0xb7, 0xfa, 0x1, 0x58},
+ },
+ {
+ Name: "mutex_unix.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xec, 0xa7, 0x29, 0x58, 0x31, 0xc2, 0xf0, 0xee, 0x48, 0xba, 0x54, 0xd0, 0x62, 0x91, 0x4d, 0x6, 0xa1, 0xdd, 0x8e, 0xbe},
+ },
+ {
+ Name: "mutex_w32.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x27, 0xd1, 0xa, 0xf5, 0xbd, 0x33, 0x1b, 0xdb, 0x97, 0x3f, 0x61, 0x45, 0xb7, 0x4f, 0x72, 0xb6, 0x7, 0xcf, 0xc4, 0x6e},
+ },
+ {
+ Name: "notify.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xfc, 0xab, 0x5b, 0xfa, 0xf0, 0x19, 0x8, 0xd3, 0xde, 0x93, 0xfa, 0x88, 0xb5, 0xea, 0xe9, 0xe9, 0x6c, 0xa3, 0xc8, 0xe8},
+ },
+ {
+ Name: "os.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xbe, 0x2e, 0xa4, 0xcf, 0xc0, 0x19, 0x59, 0x93, 0xa3, 0x40, 0xc9, 0x2, 0xae, 0xdd, 0xf1, 0xbe, 0x4b, 0x8e, 0xd7, 0x3a},
+ },
+ {
+ Name: "os.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x7, 0xa, 0x2d, 0xdd, 0x17, 0xf7, 0x71, 0xf9, 0x8f, 0xf8, 0xcc, 0xd6, 0xf0, 0x33, 0xbd, 0xac, 0xc5, 0xe9, 0xf6, 0xc},
+ },
+ {
+ Name: "os_common.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf6, 0xc3, 0xe7, 0xff, 0x89, 0x46, 0x30, 0x86, 0x40, 0x18, 0x22, 0xf4, 0x81, 0xe7, 0xe3, 0xb8, 0x7b, 0x2c, 0x78, 0xc7},
+ },
+ {
+ Name: "os_unix.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xab, 0xc2, 0x3a, 0x45, 0x2e, 0x72, 0xf7, 0x1c, 0x76, 0xaf, 0xa9, 0x98, 0x3c, 0x3a, 0xd9, 0xd4, 0x25, 0x61, 0x6c, 0x6d},
+ },
+ {
+ Name: "os_win.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xae, 0xb0, 0x88, 0x14, 0xb3, 0xda, 0xbe, 0x81, 0xb8, 0x4c, 0xda, 0x91, 0x85, 0x82, 0xb0, 0xf, 0xfd, 0x86, 0xe4, 0x87},
+ },
+ {
+ Name: "pager.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x61, 0x72, 0x7f, 0xaa, 0x9c, 0xf, 0x3d, 0x56, 0x62, 0x65, 0xbe, 0x7e, 0xec, 0x5b, 0x2a, 0x35, 0xf6, 0xa4, 0xbc, 0x9f},
+ },
+ {
+ Name: "pager.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x6f, 0x65, 0x91, 0x36, 0xe2, 0x76, 0x7, 0x9d, 0xa4, 0x3a, 0x2e, 0x39, 0xe1, 0xb6, 0x86, 0x37, 0xec, 0xad, 0xcf, 0x68},
+ },
+ {
+ Name: "parse.y", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x83, 0x10, 0xb2, 0x69, 0x89, 0xb0, 0x5b, 0xed, 0x1e, 0x1b, 0x3, 0xda, 0x80, 0xf5, 0xc0, 0xa5, 0x2e, 0x9a, 0xd1, 0xd2},
+ },
+ {
+ Name: "pcache.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x48, 0x2a, 0x18, 0x8b, 0xee, 0x19, 0x91, 0xbc, 0x8a, 0xda, 0xc9, 0x6a, 0x19, 0x3a, 0x53, 0xe5, 0x46, 0x2a, 0x8c, 0x10},
+ },
+ {
+ Name: "pcache.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf4, 0xd4, 0xad, 0x71, 0xc1, 0xd, 0x78, 0xc6, 0xda, 0xbd, 0xe2, 0x52, 0x15, 0xcd, 0x41, 0x5a, 0x76, 0x1, 0x48, 0xca},
+ },
+ {
+ Name: "pcache1.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x41, 0x47, 0xd2, 0xef, 0xf5, 0x5b, 0xdd, 0x9f, 0xf7, 0xc6, 0x86, 0xc, 0x60, 0x18, 0x10, 0x20, 0x16, 0x6c, 0x5f, 0x50},
+ },
+ {
+ Name: "pragma.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x22, 0x97, 0x71, 0x69, 0x61, 0x7d, 0x49, 0x22, 0xb3, 0x99, 0x3f, 0x76, 0x9d, 0x90, 0xfa, 0x7b, 0xc4, 0x41, 0xea, 0x50},
+ },
+ {
+ Name: "prepare.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd7, 0x8d, 0x83, 0xcb, 0xd8, 0x78, 0x97, 0xf5, 0x73, 0x30, 0x3f, 0x9f, 0x57, 0xab, 0x8d, 0xe0, 0x24, 0xa6, 0xe3, 0xf8},
+ },
+ {
+ Name: "printf.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x9f, 0x68, 0xd2, 0x4, 0xff, 0xdc, 0x9f, 0x3d, 0x42, 0x7f, 0x80, 0xa8, 0x23, 0x9a, 0x7f, 0xa3, 0xa9, 0x8a, 0xec, 0xbd},
+ },
+ {
+ Name: "random.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x23, 0x4e, 0xbd, 0xf6, 0x58, 0xf4, 0x36, 0xcc, 0x7c, 0x68, 0xf0, 0x27, 0xc4, 0x8b, 0xe, 0x1b, 0x9b, 0xa3, 0x4e, 0x98},
+ },
+ {
+ Name: "resolve.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x91, 0xef, 0xca, 0xa1, 0xa1, 0x6b, 0xfc, 0x98, 0xfb, 0x35, 0xd8, 0x5c, 0xad, 0x15, 0x6b, 0x93, 0x53, 0x3e, 0x4e, 0x6},
+ },
+ {
+ Name: "rowset.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x57, 0x61, 0xf9, 0x85, 0x50, 0xb1, 0x76, 0xcc, 0xe1, 0x1d, 0xcb, 0xce, 0xc9, 0x38, 0x99, 0xa0, 0x75, 0xbb, 0x64, 0xfd},
+ },
+ {
+ Name: "select.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf3, 0xf1, 0x49, 0x9, 0x63, 0x95, 0x5b, 0x8e, 0xd0, 0xc9, 0xfe, 0x6e, 0x1e, 0xec, 0x83, 0x6c, 0x1a, 0x52, 0x94, 0xb4},
+ },
+ {
+ Name: "shell.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x1b, 0xe2, 0x87, 0x1f, 0xed, 0x9a, 0x1f, 0xdf, 0x1d, 0xf7, 0x19, 0x8e, 0x11, 0x25, 0x36, 0x0, 0xec, 0xba, 0x76, 0xcc},
+ },
+ {
+ Name: "sqlcipher.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x82, 0x75, 0x30, 0x95, 0xcd, 0x17, 0x23, 0xc5, 0xff, 0x4f, 0x11, 0x15, 0xe4, 0x97, 0x55, 0x91, 0xee, 0x34, 0xf5, 0xce},
+ },
+ {
+ Name: "sqlite.h.in", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x66, 0x8, 0x82, 0x31, 0x75, 0xde, 0x5b, 0x6a, 0xd, 0x37, 0x8f, 0xdb, 0xc, 0x38, 0x18, 0xb6, 0xab, 0x4f, 0xbf, 0x8e},
+ },
+ {
+ Name: "sqlite3.rc", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x96, 0x98, 0x76, 0xda, 0x1e, 0x57, 0x14, 0x3d, 0xe0, 0xb4, 0xd1, 0xc7, 0x62, 0x9f, 0xd3, 0x35, 0x6f, 0x2e, 0x1c, 0x96},
+ },
+ {
+ Name: "sqlite3ext.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x92, 0x8b, 0xb3, 0xba, 0xd9, 0xdd, 0x64, 0x3c, 0x30, 0x1d, 0xd2, 0xb0, 0xac, 0x22, 0x28, 0x7a, 0x81, 0x28, 0x48, 0x84},
+ },
+ {
+ Name: "sqliteInt.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x59, 0x50, 0xf2, 0x37, 0xd9, 0xf9, 0xf2, 0xd3, 0xef, 0x6b, 0xd8, 0xbe, 0x34, 0x2d, 0xcf, 0x64, 0x89, 0x22, 0x51, 0x42},
+ },
+ {
+ Name: "sqliteLimit.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xc7, 0xae, 0xe5, 0x3c, 0xeb, 0xca, 0x94, 0xda, 0x51, 0xe7, 0x1a, 0x82, 0x2e, 0xa5, 0xa6, 0xde, 0xb9, 0x3, 0x85, 0xdf},
+ },
+ {
+ Name: "status.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x28, 0x34, 0x9e, 0x6d, 0x3d, 0x20, 0x88, 0xe0, 0x0, 0x3b, 0x76, 0xf8, 0xa, 0x89, 0x54, 0xfa, 0xec, 0x59, 0x30, 0xba},
+ },
+ {
+ Name: "table.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x26, 0xbb, 0xfb, 0x4f, 0x45, 0x6c, 0x42, 0x98, 0x25, 0x29, 0xea, 0x1a, 0x63, 0xa0, 0x17, 0x51, 0xdd, 0x3e, 0xe9, 0x5a},
+ },
+ {
+ Name: "tclsqlite.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf1, 0xbb, 0x29, 0x21, 0xda, 0xc, 0x68, 0xa4, 0xf1, 0xc8, 0xe1, 0x5c, 0xf5, 0x66, 0xb2, 0x33, 0xe9, 0x2a, 0x51, 0x9f},
+ },
+ {
+ Name: "test1.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xa6, 0x38, 0xe4, 0x80, 0xad, 0xdf, 0x14, 0x43, 0x9c, 0xdf, 0xa4, 0xee, 0x16, 0x4d, 0xc3, 0x1b, 0x79, 0xf8, 0xbc, 0xac},
+ },
+ {
+ Name: "test2.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd1, 0x30, 0xe9, 0xd0, 0x1b, 0x70, 0x24, 0xa5, 0xec, 0x6d, 0x73, 0x5, 0x92, 0xee, 0x4d, 0x1f, 0xb0, 0x2c, 0xfd, 0xb4},
+ },
+ {
+ Name: "test3.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe3, 0xed, 0x31, 0xc, 0x81, 0x4, 0xfe, 0x36, 0x21, 0xce, 0xbb, 0xf, 0x51, 0xd1, 0x1, 0x45, 0x1, 0x8d, 0x4f, 0xac},
+ },
+ {
+ Name: "test4.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xa6, 0x37, 0x5c, 0x7c, 0xc4, 0x3, 0xf6, 0xc, 0xaa, 0xb7, 0xe9, 0x59, 0x53, 0x3e, 0x3d, 0xb1, 0xff, 0x75, 0xa, 0xe4},
+ },
+ {
+ Name: "test5.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x30, 0x3d, 0x12, 0x5, 0xb2, 0x26, 0x28, 0x42, 0x3d, 0x98, 0x6f, 0x71, 0xe2, 0x7c, 0x7c, 0xf7, 0x14, 0xa7, 0x45, 0xa6},
+ },
+ {
+ Name: "test6.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xc1, 0x51, 0xea, 0x42, 0x98, 0x9b, 0xb, 0xe2, 0x4e, 0xe4, 0xb9, 0xa4, 0xbe, 0x37, 0x8b, 0x4f, 0x63, 0x6d, 0xb6, 0x41},
+ },
+ {
+ Name: "test7.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x3c, 0xd4, 0xa2, 0x24, 0xd7, 0xe8, 0xe1, 0x6b, 0xd7, 0xcb, 0xe4, 0x9e, 0x2d, 0x3e, 0x94, 0xce, 0x9b, 0x17, 0xbd, 0x76},
+ },
+ {
+ Name: "test8.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xc5, 0x73, 0x93, 0x32, 0xd4, 0x6e, 0x57, 0x12, 0x1d, 0xa2, 0x7c, 0x3e, 0x88, 0xfd, 0xe7, 0x5a, 0xeb, 0x87, 0x10, 0xf7},
+ },
+ {
+ Name: "test9.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe5, 0x99, 0x3e, 0x8f, 0xf7, 0x8f, 0x61, 0xc2, 0x43, 0x5b, 0x6f, 0x97, 0xa3, 0xb4, 0x63, 0xe2, 0x27, 0xc7, 0x67, 0xac},
+ },
+ {
+ Name: "test_async.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xb0, 0xb9, 0x43, 0x18, 0x5b, 0xfc, 0x23, 0xc1, 0x7f, 0xd0, 0x8f, 0x55, 0x76, 0x8c, 0xac, 0x12, 0xa9, 0xf5, 0x69, 0x51},
+ },
+ {
+ Name: "test_autoext.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xb5, 0x1, 0x3f, 0x31, 0x73, 0xa2, 0x17, 0x6e, 0x2d, 0x9f, 0xc, 0xaa, 0x99, 0x19, 0x30, 0x36, 0xbf, 0xc3, 0x7e, 0x91},
+ },
+ {
+ Name: "test_backup.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe9, 0x67, 0x42, 0x4a, 0x29, 0xf, 0x73, 0x8a, 0xec, 0xfd, 0xac, 0x57, 0x8e, 0x9b, 0x87, 0xa4, 0xc4, 0xae, 0x8d, 0x7f},
+ },
+ {
+ Name: "test_btree.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xdb, 0x72, 0x88, 0x9b, 0x2a, 0xfb, 0x62, 0x72, 0x82, 0x8d, 0xda, 0x86, 0x6d, 0xcc, 0xf1, 0x22, 0xa4, 0x9a, 0x72, 0x99},
+ },
+ {
+ Name: "test_config.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x53, 0x47, 0x27, 0xa0, 0x80, 0x42, 0xb6, 0xca, 0xd6, 0x7e, 0x26, 0x7e, 0x87, 0xb4, 0x3, 0xa4, 0x1a, 0x73, 0xb2, 0x99},
+ },
+ {
+ Name: "test_demovfs.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x63, 0x76, 0x27, 0x7, 0x1d, 0x9e, 0x28, 0xf4, 0xb3, 0x45, 0x1b, 0xbb, 0xdd, 0xf8, 0x8, 0xd1, 0xa9, 0x12, 0x0, 0xf8},
+ },
+ {
+ Name: "test_devsym.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x21, 0xf0, 0xf6, 0x84, 0xd8, 0x61, 0x11, 0x67, 0x70, 0xde, 0xfc, 0xde, 0xcd, 0x53, 0x2b, 0xa3, 0xee, 0xab, 0xa9, 0x75},
+ },
+ {
+ Name: "test_fs.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x47, 0x8c, 0xad, 0x80, 0xb1, 0x6a, 0x90, 0x9b, 0x23, 0xbd, 0x3, 0xc2, 0xda, 0xd8, 0xb4, 0x49, 0xa7, 0x45, 0x87, 0xa1},
+ },
+ {
+ Name: "test_func.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x6f, 0x9b, 0xb0, 0x3d, 0xc8, 0x8a, 0x21, 0xd6, 0x58, 0xbf, 0x99, 0x99, 0xba, 0xf6, 0x6d, 0xc1, 0xd5, 0x2e, 0xbc, 0x54},
+ },
+ {
+ Name: "test_hexio.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xb2, 0xb, 0x5c, 0xe7, 0x30, 0xab, 0x7f, 0xa8, 0x0, 0xd2, 0xd0, 0xcc, 0x38, 0xc7, 0x72, 0x75, 0x59, 0x3e, 0xbd, 0xbb},
+ },
+ {
+ Name: "test_init.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe3, 0x72, 0x4d, 0x8b, 0xe3, 0x14, 0xdb, 0x9, 0xee, 0xa8, 0x4, 0xb, 0x9d, 0xdf, 0xc8, 0xa8, 0xbe, 0xee, 0x22, 0x91},
+ },
+ {
+ Name: "test_intarray.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf5, 0xc3, 0xd9, 0xe4, 0x5, 0x9a, 0x16, 0x56, 0x7, 0x34, 0x7, 0xe4, 0x3a, 0x92, 0x11, 0x79, 0x99, 0x69, 0x7b, 0x93},
+ },
+ {
+ Name: "test_intarray.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x69, 0x13, 0x37, 0xd1, 0xae, 0xd6, 0x37, 0x15, 0xd6, 0x2e, 0x76, 0x26, 0x6f, 0xf, 0x3b, 0x50, 0x8b, 0x1, 0xa, 0x34},
+ },
+ {
+ Name: "test_journal.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe8, 0x70, 0x1a, 0x4e, 0xea, 0xdb, 0x8e, 0xad, 0x16, 0x9d, 0x60, 0x6, 0x40, 0x7d, 0x54, 0xa8, 0x98, 0x59, 0x2d, 0x70},
+ },
+ {
+ Name: "test_loadext.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x11, 0x37, 0xe3, 0xa9, 0xaa, 0xe9, 0x29, 0x6, 0xb8, 0x28, 0x9f, 0x6c, 0x3d, 0xaa, 0x61, 0xf0, 0xd0, 0x70, 0xf5, 0x5a},
+ },
+ {
+ Name: "test_malloc.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xcf, 0x98, 0xa8, 0xfb, 0x21, 0x82, 0xc0, 0xba, 0xf5, 0xa, 0xd5, 0x79, 0x79, 0xb6, 0x75, 0xbb, 0x70, 0x7a, 0x93, 0xb0},
+ },
+ {
+ Name: "test_multiplex.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x62, 0x45, 0x41, 0xb3, 0x2a, 0x10, 0xd2, 0x1a, 0x2f, 0xd1, 0xa, 0x35, 0xee, 0x66, 0x32, 0xbd, 0xac, 0x55, 0x2d, 0x41},
+ },
+ {
+ Name: "test_multiplex.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xb7, 0xe1, 0xaf, 0xea, 0x5f, 0xd7, 0x8b, 0x87, 0x58, 0x2, 0x65, 0xf8, 0x4c, 0x81, 0x61, 0x2c, 0xbd, 0x2, 0x5b, 0xaf},
+ },
+ {
+ Name: "test_mutex.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xc9, 0xb4, 0xa2, 0x9a, 0xb7, 0x5c, 0x77, 0xea, 0x5f, 0x36, 0xb5, 0x19, 0x32, 0x56, 0xd7, 0xf, 0xe6, 0x58, 0xe, 0x95},
+ },
+ {
+ Name: "test_onefile.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x69, 0x86, 0x74, 0x41, 0xb8, 0xcc, 0x9a, 0x62, 0x1a, 0xf3, 0x24, 0x13, 0xfc, 0x63, 0xda, 0x80, 0x99, 0x37, 0x64, 0xf4},
+ },
+ {
+ Name: "test_osinst.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x53, 0x14, 0x33, 0x31, 0x3e, 0xe3, 0x6c, 0x7, 0xeb, 0x21, 0xc0, 0x2f, 0x31, 0x15, 0xcb, 0x7a, 0x37, 0x48, 0x6c, 0x79},
+ },
+ {
+ Name: "test_pcache.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x8f, 0xcf, 0xe7, 0xe2, 0x6e, 0x3f, 0xf1, 0x74, 0x96, 0xb8, 0x40, 0xf5, 0xd6, 0x3c, 0x75, 0x78, 0x3a, 0xff, 0x81, 0x62},
+ },
+ {Name: "test_quota.c", Mode: 0x81a4, Hash: plumbing.Hash{
+ 0xe5, 0x90, 0x99, 0x6c, 0xa4, 0xb8, 0x57, 0x4a, 0xb1, 0xe4, 0x18, 0x5d, 0x57, 0x77, 0x56, 0x66, 0x4a, 0xd2, 0x49, 0x5f}}, {Name: "test_quota.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x2d, 0x7, 0x67, 0xa1, 0x9a, 0xb7, 0xc3, 0xa4, 0x21, 0xcd, 0xba, 0x6a, 0x3, 0x49, 0x20, 0x43, 0x67, 0xc2, 0x2c, 0x81},
+ },
+ {
+ Name: "test_rtree.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf5, 0x4a, 0xe9, 0xb0, 0x63, 0xbb, 0x73, 0x71, 0x2f, 0xcf, 0xc1, 0xc6, 0x83, 0x2e, 0x2a, 0x50, 0xf6, 0x2a, 0x97, 0xe7},
+ },
+ {
+ Name: "test_schema.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x12, 0x64, 0x44, 0x67, 0x64, 0x7d, 0x51, 0x39, 0x4a, 0x1, 0xf9, 0xfa, 0x60, 0x37, 0x62, 0x98, 0x18, 0x54, 0x66, 0xfd},
+ },
+ {
+ Name: "test_server.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xed, 0x8, 0x18, 0xe6, 0xf6, 0x5f, 0x27, 0x28, 0x2d, 0xc7, 0xb1, 0xc1, 0x90, 0xec, 0x18, 0x8c, 0x89, 0x33, 0x0, 0x2b},
+ },
+ {
+ Name: "test_sqllog.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x4a, 0xa6, 0x8b, 0x7c, 0x42, 0x93, 0x23, 0xb8, 0xee, 0xbe, 0x6c, 0x9c, 0x2d, 0x7, 0xfc, 0x66, 0xd, 0x8d, 0x47, 0xc9},
+ },
+ {
+ Name: "test_stat.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd4, 0xc9, 0x2, 0xb5, 0xea, 0x11, 0x1a, 0xd5, 0x8a, 0x73, 0x71, 0x12, 0xc2, 0x8f, 0x0, 0x38, 0x43, 0x4c, 0x85, 0xc0},
+ },
+ {
+ Name: "test_superlock.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x93, 0x6f, 0xca, 0xd0, 0xc5, 0x6f, 0x6b, 0xc8, 0x58, 0x9, 0x74, 0x2f, 0x6a, 0xe1, 0xc1, 0xee, 0xb8, 0xb7, 0xd2, 0xf1},
+ },
+ {
+ Name: "test_syscall.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x7c, 0x8, 0x73, 0xc1, 0x6d, 0x84, 0x32, 0x2, 0xf3, 0xe, 0x2d, 0xb9, 0x45, 0x9f, 0xa2, 0x99, 0x75, 0xea, 0x5e, 0x68},
+ },
+ {
+ Name: "test_tclvar.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x12, 0x19, 0x19, 0xc, 0x3, 0x0, 0xfd, 0x5e, 0xc7, 0xa3, 0xc5, 0x84, 0x8, 0xf3, 0x38, 0x43, 0xd2, 0xe, 0xee, 0x15},
+ },
+ {
+ Name: "test_thread.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x2f, 0x93, 0x63, 0xb7, 0x50, 0x1e, 0x51, 0x19, 0x81, 0xfe, 0x32, 0x83, 0x1f, 0xf2, 0xe8, 0xfd, 0x2f, 0x30, 0xc4, 0x93},
+ },
+ {
+ Name: "test_vfs.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xfc, 0xd5, 0x77, 0x43, 0x9c, 0xfd, 0x6c, 0x72, 0xdd, 0xe4, 0x83, 0x58, 0x92, 0x14, 0x20, 0xcf, 0x6e, 0xf1, 0xf8, 0x6d},
+ },
+ {
+ Name: "test_vfstrace.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xa, 0xac, 0xc0, 0x1f, 0xe4, 0x2e, 0x77, 0xfe, 0xb8, 0x58, 0xe4, 0xbe, 0xd0, 0xcb, 0x7e, 0x4, 0xa4, 0x35, 0xb2, 0x10},
+ },
+ {
+ Name: "test_wsd.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x99, 0xe4, 0xa0, 0x56, 0x58, 0x1f, 0x58, 0xf4, 0x53, 0x6f, 0xdb, 0x5a, 0x5d, 0xf7, 0x5c, 0x74, 0x69, 0x8a, 0x81, 0x62},
+ },
+ {
+ Name: "tokenize.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xfa, 0xea, 0x5f, 0x26, 0xc7, 0x9c, 0x5e, 0x18, 0x8f, 0xa8, 0x7f, 0x2f, 0xdf, 0x6f, 0xf7, 0x6a, 0x7a, 0x60, 0x6, 0xc5},
+ },
+ {
+ Name: "trigger.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf1, 0xff, 0x76, 0x6e, 0x20, 0x2a, 0x45, 0x18, 0xec, 0x10, 0xe5, 0x27, 0x12, 0xc, 0xd3, 0xe, 0x83, 0xfb, 0xd0, 0x34},
+ },
+ {
+ Name: "update.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x3a, 0xb1, 0xab, 0x2a, 0x4b, 0x65, 0xda, 0x3f, 0x19, 0x8c, 0x15, 0x84, 0xd5, 0x4d, 0x36, 0xf1, 0x8c, 0xa1, 0x21, 0x4a},
+ },
+ {
+ Name: "utf.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x6d, 0x5b, 0x1b, 0xfe, 0x40, 0xc, 0x37, 0x48, 0xaa, 0x70, 0xa3, 0xb2, 0xfd, 0x5e, 0xe, 0xac, 0x5f, 0xc0, 0x4d, 0xe2},
+ },
+ {
+ Name: "util.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xd8, 0x3a, 0x63, 0x1, 0x5f, 0xd8, 0x7d, 0xcc, 0x4f, 0xb4, 0x41, 0x66, 0xfa, 0xbf, 0x2e, 0x9b, 0xc9, 0x67, 0x1e, 0xb8},
+ },
+ {
+ Name: "vacuum.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x4a, 0xfb, 0x2c, 0xca, 0x64, 0xdd, 0x60, 0x76, 0x11, 0x22, 0x2c, 0x7, 0x93, 0x2d, 0x12, 0xea, 0xcf, 0xa, 0x2c, 0x22},
+ },
+ {
+ Name: "vdbe.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xf3, 0x43, 0xe1, 0x3d, 0x4e, 0x91, 0x78, 0x4b, 0x15, 0x88, 0x10, 0xc5, 0xb7, 0xd4, 0x46, 0x84, 0xdf, 0xbf, 0xa2, 0xa5},
+ },
+ {
+ Name: "vdbe.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xfa, 0x7b, 0x31, 0xb7, 0x27, 0xa, 0x90, 0xd4, 0xf6, 0x37, 0x36, 0x5a, 0xfc, 0xc9, 0xbd, 0xa1, 0xd1, 0xb1, 0xe1, 0xd6},
+ },
+ {
+ Name: "vdbeInt.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x3a, 0x5b, 0x40, 0x28, 0xbb, 0xd6, 0xc9, 0x56, 0x10, 0xd7, 0xc, 0xce, 0x3, 0x69, 0xdf, 0xcd, 0x60, 0x7a, 0xa9, 0x0},
+ },
+ {
+ Name: "vdbeapi.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x7c, 0x86, 0x1e, 0x2d, 0x47, 0x21, 0x8c, 0x91, 0x63, 0x31, 0x77, 0x77, 0xc3, 0x7, 0x21, 0x99, 0xe9, 0xb4, 0x2, 0x80},
+ },
+ {
+ Name: "vdbeaux.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x2c, 0x42, 0x69, 0xa5, 0x9e, 0x6d, 0xbc, 0xe8, 0x67, 0x1c, 0x47, 0x4f, 0x34, 0x61, 0x90, 0xbe, 0x2a, 0xe, 0x18, 0x51},
+ },
+ {
+ Name: "vdbeblob.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x2e, 0x8f, 0xd8, 0xee, 0x74, 0x47, 0xe6, 0x46, 0x46, 0xe3, 0x49, 0x4b, 0x4c, 0x4, 0x1d, 0x3a, 0x4a, 0xbb, 0x8, 0x85},
+ },
+ {
+ Name: "vdbemem.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x8f, 0xc2, 0x22, 0xe2, 0xde, 0x20, 0x50, 0x14, 0x50, 0xec, 0xea, 0x9d, 0x4e, 0xbf, 0xaa, 0xc9, 0x81, 0x4a, 0xae, 0x59},
+ },
+ {
+ Name: "vdbesort.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xfd, 0xfc, 0x4a, 0x79, 0xdd, 0xc9, 0x6e, 0x59, 0x9b, 0x1b, 0xe, 0xeb, 0xac, 0xbd, 0xb8, 0x45, 0xc6, 0x38, 0x13, 0xb2},
+ },
+ {
+ Name: "vdbetrace.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x35, 0x62, 0x77, 0xe8, 0xd2, 0x3b, 0xca, 0xdb, 0x67, 0x6b, 0x59, 0xd1, 0xa4, 0xdc, 0xf8, 0x42, 0xfd, 0xc4, 0xc9, 0x72},
+ },
+ {
+ Name: "vtab.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x95, 0x82, 0x2, 0xc3, 0x1e, 0x24, 0x15, 0xb, 0x60, 0xf1, 0xa, 0x8a, 0xf, 0x74, 0x41, 0xaf, 0xac, 0x3f, 0xbb, 0x1c},
+ },
+ {
+ Name: "wal.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe6, 0x42, 0xea, 0x21, 0x5, 0xb5, 0xc5, 0x4a, 0xf3, 0x5, 0x88, 0x9, 0x62, 0x69, 0xab, 0x75, 0xcb, 0xef, 0x8f, 0xf2},
+ },
+ {
+ Name: "wal.h", Mode: 0x81a4,
+ Hash: plumbing.Hash{0x9, 0x25, 0x46, 0x35, 0x4b, 0x34, 0xc0, 0xab, 0x3d, 0x20, 0x5, 0x6a, 0x7f, 0x8a, 0x8a, 0x52, 0xe4, 0xd0, 0xb5, 0xf5},
+ },
+ {
+ Name: "walker.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe7, 0x1e, 0xd2, 0xac, 0x48, 0x4c, 0x91, 0x6c, 0x1c, 0xc1, 0x0, 0x7e, 0x5e, 0x5, 0xda, 0x47, 0x1c, 0xb4, 0x95, 0x99},
+ },
+ {
+ Name: "where.c", Mode: 0x81a4,
+ Hash: plumbing.Hash{0xe6, 0x14, 0xf4, 0xa6, 0xd8, 0x64, 0xe7, 0xe, 0xc4, 0x32, 0x8d, 0xb, 0xdb, 0x25, 0x4e, 0x3a, 0xc9, 0xf0, 0xd2, 0x87},
+ },
+ },
+ Hash: plumbing.Hash{0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0},
+ s: (storer.EncodedObjectStorer)(nil),
+ m: map[string]*TreeEntry(nil),
+ }
+
+ var obtained Tree
+ err := obtained.Decode(obj)
+ c.Assert(err, IsNil)
+ c.Assert(entriesEquals(obtained.Entries, expected.Entries), Equals, true)
+}
diff --git a/plumbing/revlist/revlist.go b/plumbing/revlist/revlist.go
new file mode 100644
index 0000000..106e78b
--- /dev/null
+++ b/plumbing/revlist/revlist.go
@@ -0,0 +1,128 @@
+// Package revlist implements functions to walk the objects referenced by a
+// commit history, roughly equivalent to the git rev-list command.
+package revlist
+
+import (
+ "io"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/object"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+)
+
+// Objects returns the hashes of all objects reachable from the given
+// commits. The ignore parameter lists object hashes to be excluded from the
+// result; it is a slice because that is easier to handle for other porcelain
+// elements, but internally it is converted to a map. All of the objects must
+// be accessible from the object storer.
+func Objects(
+ s storer.EncodedObjectStorer,
+ commits []*object.Commit,
+ ignore []plumbing.Hash) ([]plumbing.Hash, error) {
+
+ seen := hashListToSet(ignore)
+ result := make(map[plumbing.Hash]bool)
+ for _, c := range commits {
+ err := reachableObjects(s, c, seen, func(h plumbing.Hash) error {
+ if !seen[h] {
+ result[h] = true
+ seen[h] = true
+ }
+
+ return nil
+ })
+
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return hashSetToList(result), nil
+}
+
+// reachableObjects reports, through the callback function, all the objects
+// reachable from the specified commit. Commits whose hash is in the 'seen'
+// set are skipped, so their trees and blobs are not visited again.
+func reachableObjects(
+ s storer.EncodedObjectStorer,
+ commit *object.Commit,
+ seen map[plumbing.Hash]bool,
+ cb func(h plumbing.Hash) error) error {
+
+ return iterateCommits(commit, func(commit *object.Commit) error {
+ if seen[commit.Hash] {
+ return nil
+ }
+
+ if err := cb(commit.Hash); err != nil {
+ return err
+ }
+
+ return iterateCommitTrees(s, commit, func(h plumbing.Hash) error {
+ return cb(h)
+ })
+ })
+}
+
+// iterateCommits visits the given commit and every commit reachable from it.
+func iterateCommits(commit *object.Commit, cb func(c *object.Commit) error) error {
+ if err := cb(commit); err != nil {
+ return err
+ }
+
+ return object.WalkCommitHistory(commit, func(c *object.Commit) error {
+ return cb(c)
+ })
+}
+
+// iterateCommitTrees visits the tree of the given commit and every entry reachable from it.
+func iterateCommitTrees(
+ s storer.EncodedObjectStorer,
+ commit *object.Commit,
+ cb func(h plumbing.Hash) error) error {
+
+ tree, err := commit.Tree()
+ if err != nil {
+ return err
+ }
+ if err := cb(tree.Hash); err != nil {
+ return err
+ }
+
+ treeWalker := object.NewTreeWalker(tree, true)
+
+ for {
+ _, e, err := treeWalker.Next()
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ return err
+ }
+ if err := cb(e.Hash); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func hashSetToList(hashes map[plumbing.Hash]bool) []plumbing.Hash {
+ var result []plumbing.Hash
+ for key := range hashes {
+ result = append(result, key)
+ }
+
+ return result
+}
+
+func hashListToSet(hashes []plumbing.Hash) map[plumbing.Hash]bool {
+ result := make(map[plumbing.Hash]bool)
+ for _, h := range hashes {
+ result[h] = true
+ }
+
+ return result
+}
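The test file that follows exercises Objects against the basic fixture. As a quicker orientation, here is a minimal standalone sketch, not part of the commit, showing how Objects and its ignore list can be combined to compute which objects one side is missing; the helper name listNewObjects is an assumption for illustration, and only object.GetCommit and revlist.Objects come from the code above.

package example

import (
	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/plumbing/revlist"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
)

// listNewObjects returns the hashes reachable from want that are not already
// reachable from have, the same pattern the tests below use to simulate a
// push/fetch negotiation. Both hashes must point to commits stored in s.
func listNewObjects(s storer.EncodedObjectStorer, have, want plumbing.Hash) ([]plumbing.Hash, error) {
	haveCommit, err := object.GetCommit(s, have)
	if err != nil {
		return nil, err
	}

	wantCommit, err := object.GetCommit(s, want)
	if err != nil {
		return nil, err
	}

	// Everything already reachable from the "have" commit...
	known, err := revlist.Objects(s, []*object.Commit{haveCommit}, nil)
	if err != nil {
		return nil, err
	}

	// ...becomes the ignore list when walking the "want" commit.
	return revlist.Objects(s, []*object.Commit{wantCommit}, known)
}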
diff --git a/plumbing/revlist/revlist_test.go b/plumbing/revlist/revlist_test.go
new file mode 100644
index 0000000..60ae660
--- /dev/null
+++ b/plumbing/revlist/revlist_test.go
@@ -0,0 +1,150 @@
+package revlist
+
+import (
+ "testing"
+
+ "gopkg.in/src-d/go-git.v4/fixtures"
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/object"
+ "gopkg.in/src-d/go-git.v4/plumbing/storer"
+ "gopkg.in/src-d/go-git.v4/storage/filesystem"
+
+ . "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) { TestingT(t) }
+
+type RevListSuite struct {
+ fixtures.Suite
+ Storer storer.EncodedObjectStorer
+}
+
+var _ = Suite(&RevListSuite{})
+
+const (
+ initialCommit = "b029517f6300c2da0f4b651b8642506cd6aaf45d"
+ secondCommit = "b8e471f58bcbca63b07bda20e428190409c2db47"
+
+ someCommit = "918c48b83bd081e863dbe1b80f8998f058cd8294"
+ someCommitBranch = "e8d3ffab552895c19b9fcf7aa264d277cde33881"
+ someCommitOtherBranch = "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"
+)
+
+// Created using: git log --graph --oneline --all
+//
+// Basic fixture repository commits tree:
+//
+// * 6ecf0ef vendor stuff
+// | * e8d3ffa some code in a branch
+// |/
+// * 918c48b some code
+// * af2d6a6 some json
+// * 1669dce Merge branch 'master'
+// |\
+// | * a5b8b09 Merge pull request #1
+// | |\
+// | | * b8e471f Creating changelog
+// | |/
+// * | 35e8510 binary file
+// |/
+// * b029517 Initial commit
+
+func (s *RevListSuite) SetUpTest(c *C) {
+ s.Suite.SetUpSuite(c)
+ sto, err := filesystem.NewStorage(fixtures.Basic().One().DotGit())
+ c.Assert(err, IsNil)
+ s.Storer = sto
+}
+
+func (s *RevListSuite) commit(c *C, h plumbing.Hash) *object.Commit {
+ commit, err := object.GetCommit(s.Storer, h)
+ c.Assert(err, IsNil)
+ return commit
+}
+
+// ---
+// | |\
+// | | * b8e471f Creating changelog
+// | |/
+// * | 35e8510 binary file
+// |/
+// * b029517 Initial commit
+func (s *RevListSuite) TestRevListObjects(c *C) {
+ revList := map[string]bool{
+ "b8e471f58bcbca63b07bda20e428190409c2db47": true, // second commit
+ "c2d30fa8ef288618f65f6eed6e168e0d514886f4": true, // init tree
+ "d3ff53e0564a9f87d8e84b6e28e5060e517008aa": true, // CHANGELOG
+ }
+
+ initCommit := s.commit(c, plumbing.NewHash(initialCommit))
+ secondCommit := s.commit(c, plumbing.NewHash(secondCommit))
+
+ localHist, err := Objects(s.Storer, []*object.Commit{initCommit}, nil)
+ c.Assert(err, IsNil)
+
+ remoteHist, err := Objects(s.Storer, []*object.Commit{secondCommit}, localHist)
+ c.Assert(err, IsNil)
+
+ for _, h := range remoteHist {
+ c.Assert(revList[h.String()], Equals, true)
+ }
+ c.Assert(len(remoteHist), Equals, len(revList))
+}
+
+func (s *RevListSuite) TestRevListObjectsReverse(c *C) {
+ initCommit := s.commit(c, plumbing.NewHash(initialCommit))
+ secondCommit := s.commit(c, plumbing.NewHash(secondCommit))
+
+ localHist, err := Objects(s.Storer, []*object.Commit{secondCommit}, nil)
+ c.Assert(err, IsNil)
+
+ remoteHist, err := Objects(s.Storer, []*object.Commit{initCommit}, localHist)
+ c.Assert(err, IsNil)
+
+ c.Assert(len(remoteHist), Equals, 0)
+}
+
+func (s *RevListSuite) TestRevListObjectsSameCommit(c *C) {
+ commit := s.commit(c, plumbing.NewHash(secondCommit))
+
+ localHist, err := Objects(s.Storer, []*object.Commit{commit}, nil)
+ c.Assert(err, IsNil)
+
+ remoteHist, err := Objects(s.Storer, []*object.Commit{commit}, localHist)
+ c.Assert(err, IsNil)
+
+ c.Assert(len(remoteHist), Equals, 0)
+}
+
+// * 6ecf0ef vendor stuff
+// | * e8d3ffa some code in a branch
+// |/
+// * 918c48b some code
+// -----
+func (s *RevListSuite) TestRevListObjectsNewBranch(c *C) {
+ someCommit := s.commit(c, plumbing.NewHash(someCommit))
+ someCommitBranch := s.commit(c, plumbing.NewHash(someCommitBranch))
+ someCommitOtherBranch := s.commit(c, plumbing.NewHash(someCommitOtherBranch))
+
+ localHist, err := Objects(s.Storer, []*object.Commit{someCommit}, nil)
+ c.Assert(err, IsNil)
+
+ remoteHist, err := Objects(
+ s.Storer, []*object.Commit{someCommitBranch, someCommitOtherBranch}, localHist)
+ c.Assert(err, IsNil)
+
+ revList := map[string]bool{
+ "a8d315b2b1c615d43042c3a62402b8a54288cf5c": true, // init tree
+ "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b": true, // vendor folder
+ "9dea2395f5403188298c1dabe8bdafe562c491e3": true, // foo.go
+ "e8d3ffab552895c19b9fcf7aa264d277cde33881": true, // branch commit
+ "dbd3641b371024f44d0e469a9c8f5457b0660de1": true, // init tree
+ "7e59600739c96546163833214c36459e324bad0a": true, // README
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5": true, // otherBranch commit
+ }
+
+ for _, h := range remoteHist {
+ c.Assert(revList[h.String()], Equals, true)
+ }
+ c.Assert(len(remoteHist), Equals, len(revList))
+}
diff --git a/plumbing/storer/object.go b/plumbing/storer/object.go
index 60b9171..a733ee6 100644
--- a/plumbing/storer/object.go
+++ b/plumbing/storer/object.go
@@ -12,27 +12,30 @@ var (
ErrStop = errors.New("stop iter")
)
-// ObjectStorer generic storage of objects
-type ObjectStorer interface {
- // NewObject returns a new plumbing.Object, the real type of the object can
- // be a custom implementation or the defaul one, plumbing.MemoryObject
- NewObject() plumbing.Object
- // SetObject save an object into the storage, the object shuld be create
- // with the NewObject, method, and file if the type is not supported.
- SetObject(plumbing.Object) (plumbing.Hash, error)
- // Object get an object by hash with the given plumbing.ObjectType.
- // Implementors should return (nil, plumbing.ErrObjectNotFound) if an object
- // doesn't exist with both the given hash and object type.
+// EncodedObjectStorer is a generic storage of encoded objects.
+type EncodedObjectStorer interface {
+ // NewEncodedObject returns a new plumbing.EncodedObject; the real type
+ // of the object can be a custom implementation or the default one,
+ // plumbing.MemoryObject.
+ NewEncodedObject() plumbing.EncodedObject
+ // SetEncodedObject saves an object into the storage. The object should
+ // be created with NewEncodedObject, and the call fails if the type is
+ // not supported.
+ SetEncodedObject(plumbing.EncodedObject) (plumbing.Hash, error)
+ // EncodedObject gets an object by hash with the given
+ // plumbing.ObjectType. Implementors should return
+ // (nil, plumbing.ErrObjectNotFound) if an object doesn't exist with
+ // both the given hash and object type.
//
// Valid plumbing.ObjectType values are CommitObject, BlobObject, TagObject,
// TreeObject and AnyObject. If plumbing.AnyObject is given, the object must
// be looked up regardless of its type.
- Object(plumbing.ObjectType, plumbing.Hash) (plumbing.Object, error)
- // IterObjects returns a custom ObjectIter over all the object on the
- // storage.
+ EncodedObject(plumbing.ObjectType, plumbing.Hash) (plumbing.EncodedObject, error)
+ // IterEncodedObjects returns a custom EncodedObjectIter over all the
+ // objects in the storage.
//
// Valid plumbing.ObjectType values are CommitObject, BlobObject, TagObject,
- IterObjects(plumbing.ObjectType) (ObjectIter, error)
+ IterEncodedObjects(plumbing.ObjectType) (EncodedObjectIter, error)
}
// Transactioner is a optional method for ObjectStorer, it enable transaction
@@ -52,41 +55,41 @@ type PackfileWriter interface {
PackfileWriter() (io.WriteCloser, error)
}
-// ObjectIter is a generic closable interface for iterating over objects.
-type ObjectIter interface {
- Next() (plumbing.Object, error)
- ForEach(func(plumbing.Object) error) error
+// EncodedObjectIter is a generic closable interface for iterating over objects.
+type EncodedObjectIter interface {
+ Next() (plumbing.EncodedObject, error)
+ ForEach(func(plumbing.EncodedObject) error) error
Close()
}
// Transaction is an in-progress storage transaction. A transaction must end
// with a call to Commit or Rollback.
type Transaction interface {
- SetObject(plumbing.Object) (plumbing.Hash, error)
- Object(plumbing.ObjectType, plumbing.Hash) (plumbing.Object, error)
+ SetEncodedObject(plumbing.EncodedObject) (plumbing.Hash, error)
+ EncodedObject(plumbing.ObjectType, plumbing.Hash) (plumbing.EncodedObject, error)
Commit() error
Rollback() error
}
-// ObjectLookupIter implements ObjectIter. It iterates over a series of object
-// hashes and yields their associated objects by retrieving each one from
-// object storage. The retrievals are lazy and only occur when the iterator
-// moves forward with a call to Next().
+// EncodedObjectLookupIter implements EncodedObjectIter. It iterates over a
+// series of object hashes and yields their associated objects by retrieving
+// each one from object storage. The retrievals are lazy and only occur when the
+// iterator moves forward with a call to Next().
//
-// The ObjectLookupIter must be closed with a call to Close() when it is no
-// longer needed.
-type ObjectLookupIter struct {
- storage ObjectStorer
+// The EncodedObjectLookupIter must be closed with a call to Close() when it is
+// no longer needed.
+type EncodedObjectLookupIter struct {
+ storage EncodedObjectStorer
series []plumbing.Hash
t plumbing.ObjectType
pos int
}
-// NewObjectLookupIter returns an object iterator given an object storage and
-// a slice of object hashes.
-func NewObjectLookupIter(
- storage ObjectStorer, t plumbing.ObjectType, series []plumbing.Hash) *ObjectLookupIter {
- return &ObjectLookupIter{
+// NewEncodedObjectLookupIter returns an object iterator given an object storage
+// and a slice of object hashes.
+func NewEncodedObjectLookupIter(
+ storage EncodedObjectStorer, t plumbing.ObjectType, series []plumbing.Hash) *EncodedObjectLookupIter {
+ return &EncodedObjectLookupIter{
storage: storage,
series: series,
t: t,
@@ -97,13 +100,13 @@ func NewObjectLookupIter(
// the end it will return io.EOF as an error. If the object can't be found in
// the object storage, it will return plumbing.ErrObjectNotFound as an error.
// If the object is retreieved successfully error will be nil.
-func (iter *ObjectLookupIter) Next() (plumbing.Object, error) {
+func (iter *EncodedObjectLookupIter) Next() (plumbing.EncodedObject, error) {
if iter.pos >= len(iter.series) {
return nil, io.EOF
}
hash := iter.series[iter.pos]
- obj, err := iter.storage.Object(iter.t, hash)
+ obj, err := iter.storage.EncodedObject(iter.t, hash)
if err == nil {
iter.pos++
}
@@ -114,28 +117,30 @@ func (iter *ObjectLookupIter) Next() (plumbing.Object, error) {
// ForEach call the cb function for each object contained on this iter until
// an error happends or the end of the iter is reached. If ErrStop is sent
// the iteration is stop but no error is returned. The iterator is closed.
-func (iter *ObjectLookupIter) ForEach(cb func(plumbing.Object) error) error {
+func (iter *EncodedObjectLookupIter) ForEach(cb func(plumbing.EncodedObject) error) error {
return ForEachIterator(iter, cb)
}
// Close releases any resources used by the iterator.
-func (iter *ObjectLookupIter) Close() {
+func (iter *EncodedObjectLookupIter) Close() {
iter.pos = len(iter.series)
}
-// ObjectSliceIter implements ObjectIter. It iterates over a series of objects
-// stored in a slice and yields each one in turn when Next() is called.
+// EncodedObjectSliceIter implements EncodedObjectIter. It iterates over a
+// series of objects stored in a slice and yields each one in turn when Next()
+// is called.
//
-// The ObjectSliceIter must be closed with a call to Close() when it is no
-// longer needed.
-type ObjectSliceIter struct {
- series []plumbing.Object
+// The EncodedObjectSliceIter must be closed with a call to Close() when it is
+// no longer needed.
+type EncodedObjectSliceIter struct {
+ series []plumbing.EncodedObject
pos int
}
-// NewObjectSliceIter returns an object iterator for the given slice of objects.
-func NewObjectSliceIter(series []plumbing.Object) *ObjectSliceIter {
- return &ObjectSliceIter{
+// NewEncodedObjectSliceIter returns an object iterator for the given slice of
+// objects.
+func NewEncodedObjectSliceIter(series []plumbing.EncodedObject) *EncodedObjectSliceIter {
+ return &EncodedObjectSliceIter{
series: series,
}
}
@@ -143,7 +148,7 @@ func NewObjectSliceIter(series []plumbing.Object) *ObjectSliceIter {
// Next returns the next object from the iterator. If the iterator has reached
// the end it will return io.EOF as an error. If the object is retreieved
// successfully error will be nil.
-func (iter *ObjectSliceIter) Next() (plumbing.Object, error) {
+func (iter *EncodedObjectSliceIter) Next() (plumbing.EncodedObject, error) {
if len(iter.series) == 0 {
return nil, io.EOF
}
@@ -157,32 +162,34 @@ func (iter *ObjectSliceIter) Next() (plumbing.Object, error) {
// ForEach call the cb function for each object contained on this iter until
// an error happends or the end of the iter is reached. If ErrStop is sent
// the iteration is stop but no error is returned. The iterator is closed.
-func (iter *ObjectSliceIter) ForEach(cb func(plumbing.Object) error) error {
+func (iter *EncodedObjectSliceIter) ForEach(cb func(plumbing.EncodedObject) error) error {
return ForEachIterator(iter, cb)
}
// Close releases any resources used by the iterator.
-func (iter *ObjectSliceIter) Close() {
- iter.series = []plumbing.Object{}
+func (iter *EncodedObjectSliceIter) Close() {
+ iter.series = []plumbing.EncodedObject{}
}
-// MultiObjectIter implements ObjectIter. It iterates over several ObjectIter,
+// MultiEncodedObjectIter implements EncodedObjectIter. It iterates over
+// several EncodedObjectIter instances, one after another.
//
// The MultiObjectIter must be closed with a call to Close() when it is no
// longer needed.
-type MultiObjectIter struct {
- iters []ObjectIter
+type MultiEncodedObjectIter struct {
+ iters []EncodedObjectIter
pos int
}
-// NewMultiObjectIter returns an object iterator for the given slice of objects.
-func NewMultiObjectIter(iters []ObjectIter) ObjectIter {
- return &MultiObjectIter{iters: iters}
+// NewMultiEncodedObjectIter returns an object iterator that combines the
+// given slice of iterators.
+func NewMultiEncodedObjectIter(iters []EncodedObjectIter) EncodedObjectIter {
+ return &MultiEncodedObjectIter{iters: iters}
}
// Next returns the next object from the iterator, if one iterator reach io.EOF
// is removed and the next one is used.
-func (iter *MultiObjectIter) Next() (plumbing.Object, error) {
+func (iter *MultiEncodedObjectIter) Next() (plumbing.EncodedObject, error) {
if len(iter.iters) == 0 {
return nil, io.EOF
}
@@ -200,25 +207,25 @@ func (iter *MultiObjectIter) Next() (plumbing.Object, error) {
// ForEach call the cb function for each object contained on this iter until
// an error happends or the end of the iter is reached. If ErrStop is sent
// the iteration is stop but no error is returned. The iterator is closed.
-func (iter *MultiObjectIter) ForEach(cb func(plumbing.Object) error) error {
+func (iter *MultiEncodedObjectIter) ForEach(cb func(plumbing.EncodedObject) error) error {
return ForEachIterator(iter, cb)
}
// Close releases any resources used by the iterator.
-func (iter *MultiObjectIter) Close() {
+func (iter *MultiEncodedObjectIter) Close() {
for _, i := range iter.iters {
i.Close()
}
}
type bareIterator interface {
- Next() (plumbing.Object, error)
+ Next() (plumbing.EncodedObject, error)
Close()
}
// ForEachIterator is a helper function to build iterators without need to
// rewrite the same ForEach function each time.
-func ForEachIterator(iter bareIterator, cb func(plumbing.Object) error) error {
+func ForEachIterator(iter bareIterator, cb func(plumbing.EncodedObject) error) error {
defer iter.Close()
for {
obj, err := iter.Next()
diff --git a/plumbing/storer/object_test.go b/plumbing/storer/object_test.go
index a0a7755..6bdd25c 100644
--- a/plumbing/storer/object_test.go
+++ b/plumbing/storer/object_test.go
@@ -11,14 +11,14 @@ import (
func Test(t *testing.T) { TestingT(t) }
type ObjectSuite struct {
- Objects []plumbing.Object
+ Objects []plumbing.EncodedObject
Hash []plumbing.Hash
}
var _ = Suite(&ObjectSuite{})
func (s *ObjectSuite) SetUpSuite(c *C) {
- s.Objects = []plumbing.Object{
+ s.Objects = []plumbing.EncodedObject{
s.buildObject([]byte("foo")),
s.buildObject([]byte("bar")),
}
@@ -29,7 +29,7 @@ func (s *ObjectSuite) SetUpSuite(c *C) {
}
func (s *ObjectSuite) TestMultiObjectIterNext(c *C) {
- expected := []plumbing.Object{
+ expected := []plumbing.EncodedObject{
&plumbing.MemoryObject{},
&plumbing.MemoryObject{},
&plumbing.MemoryObject{},
@@ -38,14 +38,14 @@ func (s *ObjectSuite) TestMultiObjectIterNext(c *C) {
&plumbing.MemoryObject{},
}
- iter := NewMultiObjectIter([]ObjectIter{
- NewObjectSliceIter(expected[0:2]),
- NewObjectSliceIter(expected[2:4]),
- NewObjectSliceIter(expected[4:5]),
+ iter := NewMultiEncodedObjectIter([]EncodedObjectIter{
+ NewEncodedObjectSliceIter(expected[0:2]),
+ NewEncodedObjectSliceIter(expected[2:4]),
+ NewEncodedObjectSliceIter(expected[4:5]),
})
var i int
- iter.ForEach(func(o plumbing.Object) error {
+ iter.ForEach(func(o plumbing.EncodedObject) error {
c.Assert(o, Equals, expected[i])
i++
return nil
@@ -54,7 +54,7 @@ func (s *ObjectSuite) TestMultiObjectIterNext(c *C) {
iter.Close()
}
-func (s *ObjectSuite) buildObject(content []byte) plumbing.Object {
+func (s *ObjectSuite) buildObject(content []byte) plumbing.EncodedObject {
o := &plumbing.MemoryObject{}
o.Write(content)
@@ -65,8 +65,8 @@ func (s *ObjectSuite) TestObjectLookupIter(c *C) {
var count int
storage := &MockObjectStorage{s.Objects}
- i := NewObjectLookupIter(storage, plumbing.CommitObject, s.Hash)
- err := i.ForEach(func(o plumbing.Object) error {
+ i := NewEncodedObjectLookupIter(storage, plumbing.CommitObject, s.Hash)
+ err := i.ForEach(func(o plumbing.EncodedObject) error {
c.Assert(o, NotNil)
c.Assert(o.Hash().String(), Equals, s.Hash[count].String())
count++
@@ -80,8 +80,8 @@ func (s *ObjectSuite) TestObjectLookupIter(c *C) {
func (s *ObjectSuite) TestObjectSliceIter(c *C) {
var count int
- i := NewObjectSliceIter(s.Objects)
- err := i.ForEach(func(o plumbing.Object) error {
+ i := NewEncodedObjectSliceIter(s.Objects)
+ err := i.ForEach(func(o plumbing.EncodedObject) error {
c.Assert(o, NotNil)
c.Assert(o.Hash().String(), Equals, s.Hash[count].String())
count++
@@ -94,10 +94,10 @@ func (s *ObjectSuite) TestObjectSliceIter(c *C) {
}
func (s *ObjectSuite) TestObjectSliceIterStop(c *C) {
- i := NewObjectSliceIter(s.Objects)
+ i := NewEncodedObjectSliceIter(s.Objects)
var count = 0
- err := i.ForEach(func(o plumbing.Object) error {
+ err := i.ForEach(func(o plumbing.EncodedObject) error {
c.Assert(o, NotNil)
c.Assert(o.Hash().String(), Equals, s.Hash[count].String())
count++
@@ -109,11 +109,11 @@ func (s *ObjectSuite) TestObjectSliceIterStop(c *C) {
}
func (s *ObjectSuite) TestObjectSliceIterError(c *C) {
- i := NewObjectSliceIter([]plumbing.Object{
+ i := NewEncodedObjectSliceIter([]plumbing.EncodedObject{
s.buildObject([]byte("foo")),
})
- err := i.ForEach(func(plumbing.Object) error {
+ err := i.ForEach(func(plumbing.EncodedObject) error {
return fmt.Errorf("a random error")
})
@@ -121,18 +121,18 @@ func (s *ObjectSuite) TestObjectSliceIterError(c *C) {
}
type MockObjectStorage struct {
- db []plumbing.Object
+ db []plumbing.EncodedObject
}
-func (o *MockObjectStorage) NewObject() plumbing.Object {
+func (o *MockObjectStorage) NewEncodedObject() plumbing.EncodedObject {
return nil
}
-func (o *MockObjectStorage) SetObject(obj plumbing.Object) (plumbing.Hash, error) {
+func (o *MockObjectStorage) SetEncodedObject(obj plumbing.EncodedObject) (plumbing.Hash, error) {
return plumbing.ZeroHash, nil
}
-func (o *MockObjectStorage) Object(t plumbing.ObjectType, h plumbing.Hash) (plumbing.Object, error) {
+func (o *MockObjectStorage) EncodedObject(t plumbing.ObjectType, h plumbing.Hash) (plumbing.EncodedObject, error) {
for _, o := range o.db {
if o.Hash() == h {
return o, nil
@@ -141,7 +141,7 @@ func (o *MockObjectStorage) Object(t plumbing.ObjectType, h plumbing.Hash) (plum
return nil, plumbing.ErrObjectNotFound
}
-func (o *MockObjectStorage) IterObjects(t plumbing.ObjectType) (ObjectIter, error) {
+func (o *MockObjectStorage) IterEncodedObjects(t plumbing.ObjectType) (EncodedObjectIter, error) {
return nil, nil
}
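As a closing illustration of the renamed iterator API, here is a small standalone sketch, not part of the commit, that builds an EncodedObjectSliceIter over in-memory objects and stops early with ErrStop; the object contents and the stop-after-two choice are arbitrary.

package main

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
)

func main() {
	// Build a few in-memory objects, as the test suite above does.
	var objs []plumbing.EncodedObject
	for _, content := range []string{"foo", "bar", "baz"} {
		o := &plumbing.MemoryObject{}
		o.Write([]byte(content))
		objs = append(objs, o)
	}

	// EncodedObjectSliceIter yields the objects in order; returning
	// storer.ErrStop from the callback ends the iteration without an error.
	iter := storer.NewEncodedObjectSliceIter(objs)

	count := 0
	err := iter.ForEach(func(o plumbing.EncodedObject) error {
		fmt.Println(o.Hash())
		count++
		if count == 2 {
			return storer.ErrStop
		}
		return nil
	})

	fmt.Println(count, err) // expected: 2 <nil>
}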