aboutsummaryrefslogtreecommitdiffstats
path: root/storage/seekable/internal/gitdir
diff options
context:
space:
mode:
authorAlberto Cortés <alcortesm@gmail.com>2016-07-04 17:09:22 +0200
committerMáximo Cuadros <mcuadros@gmail.com>2016-07-04 17:09:22 +0200
commit5e73f01cb2e027a8f02801635b79d3a9bc866914 (patch)
treec0e7eb355c9b8633d99bab9295cb72b6c3a9c0e1 /storage/seekable/internal/gitdir
parent808076af869550a200a3a544c9ee2fa22a8b6a85 (diff)
downloadgo-git-5e73f01cb2e027a8f02801635b79d3a9bc866914.tar.gz
Adds support to open local repositories and to use file-based object storage (#55)v3.1.0
* remove some comments * idx writer/reader * Shut up ssh tests, they are annoying * Add file scheme test to clients * Add dummy file client * Add test fot file client * Make tests use fixture endpoint * add parser for packed-refs format * add parser for packed-refs format * WIP adding dir.Refs() tests * Add test for fixture refs * refs parser for the refs directory * Documentation * Add Capabilities to file client * tgz.Exatract now accpets a path instead of a Reader * fix bug in idxfile fanout calculation * remove dead code * packfile documentation * clean packfile parser code * add core.Object.Content() and returns errors for core.ObjectStorage.Iter() * add seekable storage * add dir repos to NewRepository * clean prints * Add dir client documentation to README * Organize the README * README * Clean tgz package * Clean temp dirs after tgz tests * Gometalinter on gitdir * Clean pattern function * metalinter tgz * metalinter gitdir * gitdir coverage and remove seekable packfile filedescriptor leak * gitdir Idxfile tests and remove file descriptor leak * gitdir Idxfile tests when no idx is found * clean storage/seekable/internal/index and some formats/idxfile API issues * clean storage/seekable * clean formats/idx * turn packfile/doc.go into packfile/doc.txt * move formats/packfile/reader to decoder * fix packfile decoder error names * improve documentation * comment packfile decoder errors * comment public API (format/packfile) * remve duplicated code in packfile decoder test * move tracking_reader into an internal package and clean it * use iota for packfile format * rename packfile parse.go to packfile object_at.go * clean packfile deltas * fix delta header size bug * improve delta documentation * clean packfile deltas * clean packfiles deltas * clean repository.go * Remove go 1.5 from Travis CI Because go 1.5 does not suport internal packages. 
* change local repo scheme to local:// * change "local://" to "file://" as the local scheme * fix broken indentation * shortens names of variables in short scopes * more shortening of variable names * more shortening of variable names * Rename git dir client to "file", as the scheme used for it * Fix file format ctor name, now that the package name has change * Sortcut local repo constructor to not use remotes The object storage is build directly in the repository ctor, instead of creating a remote and waiting for the user to pull it. * update README and fix some errors in it * remove file scheme client * Local respositories has now a new ctor This is, they are no longer identified by the scheme of the URL, but are created different from inception. * remove unused URL field form Repository * move all git dir logic to seekable sotrage ctor * fix documentation * Make formats/file/dir an internal package to storage/seekable * change package storage/seekable to storage/fs * clean storage/fs * overall storage/fs clean * more cleaning * some metalinter fixes * upgrade cshared to last changes * remove dead code * fix test error info * remove file scheme check from clients * fix test error message * fix test error message * fix error messages * style changes * fix comments everywhere * style changes * style changes * scaffolding and tests for local packfiles without ifx files * outsource index building from packfile to the packfile decoder * refactor packfile header reading into a new function * move code to generate index from packfile back to index package * add header parsing * fix documentation errata * add undeltified and OFS delta support for index building from the packfile * add tests for packfile with ref-deltas * support for packfiles with ref-deltas and no idx * refactor packfile format parser to reuse code * refactor packfile format parser to reuse code * refactor packfile format parser to reuse code * refactor packfile format parser to reuse code * refactor 
packfile format parser to reuse code * WIP refactor packfile format parser to reuse code * refactor packfile format parser to reuse code * remove prints from tests * remove prints from tests * refactor packfile.core into packfile.parser * rename packfile reader to something that shows it is a recaller * rename cannot recall error * rename packfile.Reader to packfile.ReadRecaller and document * speed up test by using StreamReader instead of SeekableReader when possible * clean packfile StreamReader * stream_reader tests * refactor packfile.StreamReader into packfile.StreamReadRecaller * refactor packfile.SeekableReader into packfile.SeekableReadRecaller and document it * generalize packfile.StreamReadRecaller test to all packfile.ReadRecaller implementations * speed up storage/fs tests * speed up tests in . by loading packfiles in memory * speed up repository tests by using and smaller fixture * restore doc.go files * rename packfile.ReadRecaller implementations to shorter names * update comments to type changes * packfile.Parser test (WIP) * packfile.Parser tests and add ForgetAll() to packfile.ReadRecaller * add test for packfile.ReadRecaller.ForgetAll() * clarify seekable being able to recallByOffset forgetted objects * use better names for internal maps * metalinter packfile package * speed up some tests * documentation fixes * change storage.fs package name to storage.proxy to avoid confusion with new filesystem support * New fs package and os transparent implementation Now NewRepositoryFromFS receives a fs and a path and tests are modified accordingly, but it is still not using for anything. * add fs to gitdir and proxy.store * reduce fs interface for easier implementation * remove garbage dirs from tgz tests * change file name gitdir/dir.go to gitdir/gitdir.go * fs.OS tests * metalinter utils/fs * add NewRepositoryFromFS documentation to README * Readability fixes to README * move tgz to an external dependency * move filesystem impl. 
example to example dir * rename proxy/store.go to proxy/storage.go for coherence with memory/storage.go * rename proxy package to seekable
Diffstat (limited to 'storage/seekable/internal/gitdir')
-rw-r--r--storage/seekable/internal/gitdir/gitdir.go145
-rw-r--r--storage/seekable/internal/gitdir/gitdir_test.go263
-rw-r--r--storage/seekable/internal/gitdir/refs.go152
3 files changed, 560 insertions, 0 deletions
diff --git a/storage/seekable/internal/gitdir/gitdir.go b/storage/seekable/internal/gitdir/gitdir.go
new file mode 100644
index 0000000..bfdf030
--- /dev/null
+++ b/storage/seekable/internal/gitdir/gitdir.go
@@ -0,0 +1,145 @@
+package gitdir
+
+import (
+ "errors"
+ "io/ioutil"
+ "os"
+ "strings"
+
+ "gopkg.in/src-d/go-git.v3/clients/common"
+ "gopkg.in/src-d/go-git.v3/core"
+ "gopkg.in/src-d/go-git.v3/utils/fs"
+)
+
+const (
+ // suffix is the conventional name of a git directory.
+ suffix = ".git"
+ // packedRefsPath is the name of the packed-refs file, relative to
+ // the git directory.
+ packedRefsPath = "packed-refs"
+)
+
+var (
+ // ErrNotFound is returned by New when the path is not found.
+ ErrNotFound = errors.New("path not found")
+ // ErrIdxNotFound is returned by Idxfile when the idx file is not found on the
+ // repository.
+ ErrIdxNotFound = errors.New("idx file not found")
+ // ErrPackfileNotFound is returned by Packfile when the packfile is not found
+ // on the repository.
+ ErrPackfileNotFound = errors.New("packfile not found")
+)
+
+// The GitDir type represents a local git repository on disk. This
+// type is not zero-value-safe, use the New function to initialize it.
+type GitDir struct {
+ // fs is the filesystem abstraction used for all disk access.
+ fs fs.FS
+ // path is the path of the git directory (e.g. "/foo/bar/.git").
+ path string
+ // refs caches the references collected by Refs.
+ refs map[string]core.Hash
+ // packDir is the path of the "objects/pack" directory.
+ packDir string
+}
+
+// New returns a GitDir value ready to be used. The path argument must
+// be the absolute path of a git repository directory (e.g.
+// "/foo/bar/.git"). It returns ErrNotFound when path does not exist.
+func New(fs fs.FS, path string) (*GitDir, error) {
+ if _, err := fs.Stat(path); err != nil {
+ if os.IsNotExist(err) {
+ return nil, ErrNotFound
+ }
+ return nil, err
+ }
+
+ return &GitDir{
+ fs: fs,
+ path: path,
+ packDir: fs.Join(path, "objects", "pack"),
+ }, nil
+}
+
+// Refs scans the git directory collecting references, which it returns.
+// Symbolic references are resolved and included in the output.
+func (d *GitDir) Refs() (map[string]core.Hash, error) {
+ var err error
+
+ d.refs = make(map[string]core.Hash)
+
+ if err = d.addRefsFromPackedRefs(); err != nil {
+ return nil, err
+ }
+
+ if err = d.addRefsFromRefDir(); err != nil {
+ return nil, err
+ }
+
+ return d.refs, err
+}
+
+// Capabilities scans the git directory collecting capabilities, which it returns.
+func (d *GitDir) Capabilities() (*common.Capabilities, error) {
+ caps := common.NewCapabilities()
+ if err := d.addSymRefCapability(caps); err != nil {
+ return caps, err
+ }
+
+ return caps, nil
+}
+
+// addSymRefCapability reads the HEAD file and records its target as a
+// "symref" capability (e.g. "HEAD:refs/heads/master"). A Close error
+// on the HEAD file is reported only when no earlier error happened.
+func (d *GitDir) addSymRefCapability(caps *common.Capabilities) (err error) {
+ f, err := d.fs.Open(d.fs.Join(d.path, "HEAD"))
+ if err != nil {
+ return err
+ }
+
+ defer func() {
+ errClose := f.Close()
+ if err == nil {
+ err = errClose
+ }
+ }()
+
+ b, err := ioutil.ReadAll(f)
+ if err != nil {
+ return err
+ }
+ data := strings.TrimSpace(string(b))
+
+ // If HEAD holds a plain hash (detached HEAD), TrimPrefix leaves the
+ // data unchanged and the hash itself is advertised as the target.
+ ref := strings.TrimPrefix(data, symRefPrefix)
+ caps.Set("symref", "HEAD:"+ref)
+
+ return nil
+}
+
+// Packfile returns the path of the packfile (really, it returns the
+// path of the first file in the "objects/pack/" directory with a
+// ".pack" extension).
+func (d *GitDir) Packfile() (fs.FS, string, error) {
+ entries, err := d.fs.ReadDir(d.packDir)
+ if err != nil {
+ return nil, "", err
+ }
+
+ for _, entry := range entries {
+ name := entry.Name()
+ if strings.HasSuffix(name, ".pack") {
+ return d.fs, d.fs.Join(d.packDir, name), nil
+ }
+ }
+
+ return nil, "", ErrPackfileNotFound
+}
+
+// Idxfile returns the path of the idx file (really, it returns the
+// path of the first file in the "objects/pack/" directory with an
+// ".idx" extension).
+func (d *GitDir) Idxfile() (fs.FS, string, error) {
+ files, err := d.fs.ReadDir(d.packDir)
+ if err != nil {
+ return nil, "", err
+ }
+
+ for _, f := range files {
+ if strings.HasSuffix(f.Name(), ".idx") {
+ return d.fs, d.fs.Join(d.packDir, f.Name()), nil
+ }
+ }
+
+ return nil, "", ErrIdxNotFound
+}
diff --git a/storage/seekable/internal/gitdir/gitdir_test.go b/storage/seekable/internal/gitdir/gitdir_test.go
new file mode 100644
index 0000000..7504119
--- /dev/null
+++ b/storage/seekable/internal/gitdir/gitdir_test.go
@@ -0,0 +1,263 @@
+package gitdir
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "gopkg.in/src-d/go-git.v3/clients/common"
+ "gopkg.in/src-d/go-git.v3/core"
+ "gopkg.in/src-d/go-git.v3/utils/fs"
+
+ "github.com/alcortesm/tgz"
+ . "gopkg.in/check.v1"
+)
+
+// Test hooks the gocheck suites into the standard "go test" runner.
+func Test(t *testing.T) { TestingT(t) }
+
+// initFixtures describes the tgz fixtures used by the suite: the
+// expected capabilities and, when present, the packfile and idx file
+// paths relative to the extracted ".git" directory.
+var initFixtures = [...]struct {
+ name string
+ tgz string
+ capabilities [][2]string
+ packfile string
+ idxfile string
+}{
+ {
+ name: "spinnaker",
+ tgz: "fixtures/spinnaker-gc.tgz",
+ capabilities: [][2]string{
+ {"symref", "HEAD:refs/heads/master"},
+ },
+ packfile: "objects/pack/pack-584416f86235cac0d54bfabbdc399fb2b09a5269.pack",
+ idxfile: "objects/pack/pack-584416f86235cac0d54bfabbdc399fb2b09a5269.idx",
+ }, {
+ name: "no-packfile-no-idx",
+ tgz: "fixtures/no-packfile-no-idx.tgz",
+ }, {
+ name: "empty",
+ tgz: "fixtures/empty-gitdir.tgz",
+ },
+}
+
+// fixture holds the data of one extracted test repository.
+type fixture struct {
+ installDir string // tmp dir where the tgz was extracted
+ fs fs.FS // filesystem used to access the fixture
+ path string // repo names to paths of the extracted tgz
+ capabilities *common.Capabilities // expected capabilities
+ packfile string // path of the packfile
+ idxfile string // path of the idxfile
+}
+
+// SuiteGitDir is the gocheck suite for the gitdir package; it keeps
+// one extracted fixture per fixture name.
+type SuiteGitDir struct {
+ fixtures map[string]fixture
+}
+
+// register the suite with gocheck
+var _ = Suite(&SuiteGitDir{})
+
+// SetUpSuite extracts every tgz fixture to a temporal directory and
+// builds the expected data used by the tests.
+func (s *SuiteGitDir) SetUpSuite(c *C) {
+ s.fixtures = make(map[string]fixture, len(initFixtures))
+
+ for _, init := range initFixtures {
+ com := Commentf("fixture name = %s\n", init.name)
+
+ path, err := tgz.Extract(init.tgz)
+ c.Assert(err, IsNil, com)
+
+ expected := common.NewCapabilities()
+ for _, pair := range init.capabilities {
+ expected.Add(pair[0], pair[1])
+ }
+
+ filesystem := fs.NewOS()
+ s.fixtures[init.name] = fixture{
+ installDir: path,
+ fs: filesystem,
+ path: filesystem.Join(path, ".git"),
+ capabilities: expected,
+ packfile: init.packfile,
+ idxfile: init.idxfile,
+ }
+ }
+}
+
+// TearDownSuite removes the temporal directories created by SetUpSuite.
+func (s *SuiteGitDir) TearDownSuite(c *C) {
+ for name, fix := range s.fixtures {
+ c.Assert(os.RemoveAll(fix.installDir), IsNil,
+ Commentf("cannot delete tmp dir for fixture %s: %s\n",
+ name, fix.installDir))
+ }
+}
+
+// TestNewErrors checks that New reports ErrNotFound for paths that do
+// not exist on disk.
+func (s *SuiteGitDir) TestNewErrors(c *C) {
+ tests := [...]struct {
+ input string
+ err error
+ }{
+ {input: "./tmp/foo", err: ErrNotFound},
+ {input: "./tmp/foo/.git", err: ErrNotFound},
+ }
+
+ for i, test := range tests {
+ com := Commentf("subtest %d", i)
+
+ _, err := New(fs.NewOS(), test.input)
+ c.Assert(err, Equals, test.err, com)
+ }
+}
+
+// TestRefs checks the references collected from each fixture against a
+// known-good table covering branches, remote refs and tags.
+func (s *SuiteGitDir) TestRefs(c *C) {
+ for i, test := range [...]struct {
+ fixture string
+ refs map[string]core.Hash
+ }{
+ {
+ fixture: "spinnaker",
+ refs: map[string]core.Hash{
+ "refs/heads/master": core.NewHash("409db80e56365049edb704f2ecbd449ddf64dc0d"),
+ "refs/remotes/origin/HEAD": core.NewHash("409db80e56365049edb704f2ecbd449ddf64dc0d"),
+ "refs/remotes/origin/explicit-machine-type": core.NewHash("f262e833a215c90b703115691f03f182c1be4b91"),
+ "refs/remotes/origin/fix-aws-creds-copy": core.NewHash("871cf4d673e0d94c6eb2558bfc7a525c2bc7e538"),
+ "refs/remotes/origin/kubernetes-no-gcloud": core.NewHash("0b553b5b6fa773f3d7a38b229d9f75627c0762aa"),
+ "refs/remotes/origin/lwander-patch-igor": core.NewHash("9c987f44908bc9aa05e950347cd03228ba199630"),
+ "refs/remotes/origin/master": core.NewHash("409db80e56365049edb704f2ecbd449ddf64dc0d"),
+ "refs/remotes/origin/revert-898-codelab-script-fix": core.NewHash("426cd84d1741d0ff68bad646bc8499b1f163a893"),
+ "refs/remotes/origin/terraform-aws-prototype": core.NewHash("a34445e7d2e758a8c953fa3a357198ec09fcba88"),
+ "refs/remotes/origin/typo": core.NewHash("86b48b962e599c096a5870cd8047778bb32a6e1e"),
+ "refs/tags/v0.10.0": core.NewHash("d081d66c2a76d04ff479a3431dc36e44116fde40"),
+ "refs/tags/v0.11.0": core.NewHash("3e349f806a0d02bf658c3544c46a0a7a9ee78673"),
+ "refs/tags/v0.12.0": core.NewHash("82562fa518f0a2e2187ea2604b07b67f2e7049ae"),
+ "refs/tags/v0.13.0": core.NewHash("48b655898fa9c72d62e8dd73b022ecbddd6e4cc2"),
+ "refs/tags/v0.14.0": core.NewHash("7ecc2ad58e24a5b52504985467a10c6a3bb85b9b"),
+ "refs/tags/v0.15.0": core.NewHash("740e3adff4c350899db7772f8f537d1d0d96ec75"),
+ "refs/tags/v0.16.0": core.NewHash("466ca58a3129f1b2ead117a43535ecb410d621ac"),
+ "refs/tags/v0.17.0": core.NewHash("48020cb7a45603d47e6041de072fe0665e47676f"),
+ "refs/tags/v0.18.0": core.NewHash("6fcb9036ab4d921dbdab41baf923320484a11188"),
+ "refs/tags/v0.19.0": core.NewHash("a2ce1f4c9d0bde4e93dfcb90a445ed069030640c"),
+ "refs/tags/v0.20.0": core.NewHash("974f476f0ec5a9dcc4bb005384d449f0a5122da4"),
+ "refs/tags/v0.21.0": core.NewHash("e08e3917f3a0487e33cd6dcef24fe03e570b73f5"),
+ "refs/tags/v0.22.0": core.NewHash("834612b4f181171d5e1e263b4e7e55d609ab19f5"),
+ "refs/tags/v0.23.0": core.NewHash("65558da39c07a6f9104651281c226981e880b49c"),
+ "refs/tags/v0.24.0": core.NewHash("5c97aa1f2f784e92f065055f9e79df83fac7a4aa"),
+ "refs/tags/v0.25.0": core.NewHash("d6e696f9d5e2dac968638665886e2300ae15709a"),
+ "refs/tags/v0.26.0": core.NewHash("974861702abd8388e0507cf3f348d6d3c40acef4"),
+ "refs/tags/v0.27.0": core.NewHash("65771ef145b3e07e130abc84fb07f0b8044fcf59"),
+ "refs/tags/v0.28.0": core.NewHash("5d86433d6dc4358277a5e9a834948f0822225a6d"),
+ "refs/tags/v0.29.0": core.NewHash("c1582497c23d81e61963841861c5aebbf10e12ab"),
+ "refs/tags/v0.3.0": core.NewHash("8b6002b614b454d45bafbd244b127839421f92ff"),
+ "refs/tags/v0.30.0": core.NewHash("b0f26484aab0afe2f342be84583213c3c64b7eb3"),
+ "refs/tags/v0.31.0": core.NewHash("8a2da11c9d29e3a879a068c197568c108b9e5f88"),
+ "refs/tags/v0.32.0": core.NewHash("5c5fc48a1506bb4609ca5588f90cf021a29a4a37"),
+ "refs/tags/v0.33.0": core.NewHash("d443f1f61e23411d9ac08f0fc6bbeb8e4c46ee39"),
+ "refs/tags/v0.34.0": core.NewHash("0168d74697d65cde65f931254c09a6bd7ff4f0d5"),
+ "refs/tags/v0.35.0": core.NewHash("a46303084ad9decf71a8ea9fd1529e22c6fdd2c4"),
+ "refs/tags/v0.36.0": core.NewHash("4da0d7bb89e85bd5f14ff36d983a0ae773473b2d"),
+ "refs/tags/v0.37.0": core.NewHash("85ec60477681933961c9b64c18ada93220650ac5"),
+ "refs/tags/v0.4.0": core.NewHash("95ee6e6c750ded1f4dc5499bad730ce3f58c6c3a"),
+ "refs/tags/v0.5.0": core.NewHash("0a3fb06ff80156fb153bcdcc58b5e16c2d27625c"),
+ "refs/tags/v0.6.0": core.NewHash("dc22e2035292ccf020c30d226f3cc2da651773f6"),
+ "refs/tags/v0.7.0": core.NewHash("3f36d8f1d67538afd1f089ffd0d242fc4fda736f"),
+ "refs/tags/v0.8.0": core.NewHash("8526c58617f68de076358873b8aa861a354b48a9"),
+ "refs/tags/v0.9.0": core.NewHash("776914ef8a097f5683957719c49215a5db17c2cb"),
+ },
+ },
+ } {
+ com := Commentf("subtest %d", i)
+ _, d := s.newFixtureDir(c, test.fixture)
+
+ refs, err := d.Refs()
+ c.Assert(err, IsNil, com)
+ c.Assert(refs, DeepEquals, test.refs, com)
+ }
+}
+
+// newFixtureDir returns the fixture registered under fixName together
+// with a GitDir opened on its path.
+func (s *SuiteGitDir) newFixtureDir(c *C, fixName string) (*fixture, *GitDir) {
+ fix, found := s.fixtures[fixName]
+ c.Assert(found, Equals, true)
+
+ dir, err := New(fs.NewOS(), fix.path)
+ c.Assert(err, IsNil)
+
+ return &fix, dir
+}
+
+// TestCapabilities checks the capabilities detected on each fixture
+// against the expectations built in SetUpSuite.
+func (s *SuiteGitDir) TestCapabilities(c *C) {
+ tests := [...]struct {
+ fixture string
+ capabilities *common.Capabilities
+ }{
+ {fixture: "spinnaker"},
+ }
+
+ for i, test := range tests {
+ com := Commentf("subtest %d", i)
+ f, d := s.newFixtureDir(c, test.fixture)
+
+ caps, err := d.Capabilities()
+ c.Assert(err, IsNil, com)
+ c.Assert(caps, DeepEquals, f.capabilities, com)
+ }
+}
+
+// TestPackfile checks both Packfile and Idxfile over all fixtures:
+// happy path, missing pack directory and empty pack directory.
+func (s *SuiteGitDir) TestPackfile(c *C) {
+ packfile := func(d *GitDir) (fs.FS, string, error) {
+ return d.Packfile()
+ }
+ idxfile := func(d *GitDir) (fs.FS, string, error) {
+ return d.Idxfile()
+ }
+ for _, test := range [...]struct {
+ fixture string
+ fn getPathFn
+ err string // error regexp
+ }{
+ {
+ fixture: "spinnaker",
+ fn: packfile,
+ }, {
+ fixture: "spinnaker",
+ fn: idxfile,
+ }, {
+ fixture: "empty",
+ fn: packfile,
+ err: ".* no such file or directory",
+ }, {
+ fixture: "empty",
+ fn: idxfile,
+ err: ".* no such file or directory",
+ }, {
+ fixture: "no-packfile-no-idx",
+ fn: packfile,
+ err: "packfile not found",
+ }, {
+ fixture: "no-packfile-no-idx",
+ fn: idxfile,
+ err: "idx file not found",
+ },
+ } {
+ com := Commentf("fixture = %s", test.fixture)
+
+ fix, dir := s.newFixtureDir(c, test.fixture)
+
+ _, path, err := test.fn(dir)
+
+ if test.err != "" {
+ c.Assert(err, ErrorMatches, test.err, com)
+ } else {
+ c.Assert(err, IsNil, com)
+ // Comparing with extensions stripped lets the packfile
+ // expectation double as the idxfile expectation, since both
+ // files share the same base name.
+ c.Assert(strings.HasSuffix(noExt(path), noExt(fix.packfile)),
+ Equals, true, com)
+ }
+ }
+}
+
+// getPathFn is the type of the GitDir accessors under test (Packfile
+// and Idxfile).
+type getPathFn func(*GitDir) (fs.FS, string, error)
+
+// noExt returns path with its file extension (if any) removed.
+func noExt(path string) string {
+ return strings.TrimSuffix(path, filepath.Ext(path))
+}
diff --git a/storage/seekable/internal/gitdir/refs.go b/storage/seekable/internal/gitdir/refs.go
new file mode 100644
index 0000000..9c2e8fb
--- /dev/null
+++ b/storage/seekable/internal/gitdir/refs.go
@@ -0,0 +1,152 @@
+package gitdir
+
+import (
+ "bufio"
+ "errors"
+ "io/ioutil"
+ "os"
+ "strings"
+
+ "gopkg.in/src-d/go-git.v3/core"
+)
+
+var (
+ // ErrPackedRefsDuplicatedRef is returned when a duplicated
+ // reference is found in the packed-ref file. This is usually the
+ // case for corrupted git repositories.
+ ErrPackedRefsDuplicatedRef = errors.New("duplicated ref found in packed-ref file")
+ // ErrPackedRefsBadFormat is returned when the packed-ref file is
+ // corrupt.
+ ErrPackedRefsBadFormat = errors.New("malformed packed-ref")
+ // ErrSymRefTargetNotFound is returned when a symbolic reference is
+ // targeting a non-existing object. This usually means the
+ // repository is corrupt.
+ ErrSymRefTargetNotFound = errors.New("symbolic reference target not found")
+)
+
+const (
+ // symRefPrefix marks the contents of a ref file as a symbolic
+ // reference (e.g. "ref: refs/heads/master").
+ symRefPrefix = "ref: "
+)
+
+// addRefsFromPackedRefs parses the packed-refs file, if present, and
+// adds its references to d.refs. A missing packed-refs file is not an
+// error: repositories without packed references simply lack it.
+func (d *GitDir) addRefsFromPackedRefs() (err error) {
+ path := d.fs.Join(d.path, packedRefsPath)
+ f, err := d.fs.Open(path)
+ if err != nil {
+ // The original `err == os.ErrNotExist` comparison never matches
+ // the *PathError returned by filesystem opens; os.IsNotExist
+ // inspects the wrapped error and detects the missing file.
+ if os.IsNotExist(err) {
+ return nil
+ }
+ return err
+ }
+ defer func() {
+ errClose := f.Close()
+ if err == nil {
+ err = errClose
+ }
+ }()
+
+ s := bufio.NewScanner(f)
+ for s.Scan() {
+ line := s.Text()
+ if err = d.processLine(line); err != nil {
+ return err
+ }
+ }
+
+ // Scanner errors (e.g. I/O failures) are only visible here.
+ return s.Err()
+}
+
+// process lines from a packed-refs file: comments ('#') and peeled
+// annotated-tag lines ('^') are ignored; every other line must be
+// "<hash> <ref>".
+func (d *GitDir) processLine(line string) error {
+ if line == "" {
+ // guard: indexing line[0] below would panic on a blank line
+ // (e.g. a trailing newline in the packed-refs file)
+ return nil
+ }
+
+ switch line[0] {
+ case '#': // comment - ignore
+ return nil
+ case '^': // annotated tag commit of the previous line - ignore
+ return nil
+ default:
+ ws := strings.Split(line, " ") // hash then ref
+ if len(ws) != 2 {
+ return ErrPackedRefsBadFormat
+ }
+ h, r := ws[0], ws[1]
+
+ if _, ok := d.refs[r]; ok {
+ return ErrPackedRefsDuplicatedRef
+ }
+ d.refs[r] = core.NewHash(h)
+ }
+
+ return nil
+}
+
+// addRefsFromRefDir walks the "refs" directory tree, adding every file
+// found there to d.refs.
+func (d *GitDir) addRefsFromRefDir() error {
+ return d.walkTree("refs")
+}
+
+// walkTree recursively descends relPath (relative to the git
+// directory), recording every regular file found as a reference named
+// after its relative path.
+func (d *GitDir) walkTree(relPath string) error {
+ entries, err := d.fs.ReadDir(d.fs.Join(d.path, relPath))
+ if err != nil {
+ return err
+ }
+
+ for _, entry := range entries {
+ entryRelPath := d.fs.Join(relPath, entry.Name())
+
+ if entry.IsDir() {
+ if err := d.walkTree(entryRelPath); err != nil {
+ return err
+ }
+ continue
+ }
+
+ hash, err := d.readHashFile(d.fs.Join(d.path, entryRelPath))
+ if err != nil {
+ return err
+ }
+ d.refs[entryRelPath] = hash
+ }
+
+ return nil
+}
+
+// readHashFile reads a single hash from a file. If a symbolic
+// reference is found instead of a hash, the reference is resolved and
+// the proper hash is returned.
+func (d *GitDir) readHashFile(path string) (h core.Hash, err error) {
+ f, err := d.fs.Open(path)
+ if err != nil {
+ return core.ZeroHash, err
+ }
+ defer func() {
+ // report the Close error only if nothing failed earlier
+ errClose := f.Close()
+ if err == nil {
+ err = errClose
+ }
+ }()
+
+ b, err := ioutil.ReadAll(f)
+ if err != nil {
+ return core.ZeroHash, err
+ }
+ line := strings.TrimSpace(string(b))
+
+ if isSymRef(line) {
+ return d.resolveSymRef(line)
+ }
+
+ return core.NewHash(line), nil
+}
+
+// isSymRef reports whether contents is a symbolic reference (i.e. it
+// starts with the "ref: " prefix) rather than a plain hash.
+func isSymRef(contents string) bool {
+ return strings.HasPrefix(contents, symRefPrefix)
+}
+
+// resolveSymRef returns the hash of the reference targeted by the
+// given symbolic reference; the target must have been collected into
+// d.refs already, otherwise ErrSymRefTargetNotFound is returned.
+func (d *GitDir) resolveSymRef(symRef string) (core.Hash, error) {
+ target := strings.TrimPrefix(symRef, symRefPrefix)
+
+ if hash, ok := d.refs[target]; ok {
+ return hash, nil
+ }
+
+ return core.ZeroHash, ErrSymRefTargetNotFound
+}