Diffstat:
-rw-r--r--  examples/basic/main.go                                                           |  6
-rw-r--r--  fixtures/fixtures.go                                                             | 60
-rw-r--r--  formats/packfile/decoder.go                                                      | 57
-rw-r--r--  formats/packfile/decoder_test.go                                                 | 43
-rw-r--r--  formats/packfile/scanner.go (renamed from formats/packfile/parser.go)            | 30
-rw-r--r--  formats/packfile/scanner_test.go (renamed from formats/packfile/parser_test.go)  |  4
-rw-r--r--  remote.go                                                                        |  2
-rw-r--r--  storage/filesystem/internal/dotgit/dotgit.go                                     | 13
-rw-r--r--  storage/filesystem/object.go                                                     | 15
-rw-r--r--  storage/filesystem/object_test.go                                                | 20

10 files changed, 161 insertions(+), 89 deletions(-)
diff --git a/examples/basic/main.go b/examples/basic/main.go
index 1d59a6b..d92002a 100644
--- a/examples/basic/main.go
+++ b/examples/basic/main.go
@@ -10,7 +10,7 @@ import (
)
func main() {
- r := git.NewMemoryRepository()
+ r, _ := git.NewFilesystemRepository(".git")
// Clone the given repository, creating the remote, the local branches
// and fetching the objects, exactly as:
@@ -28,8 +28,8 @@ func main() {
// ... retrieving the branch being pointed by HEAD
ref, _ := r.Head()
// ... retrieving the commit object
- commit, _ := r.Commit(ref.Hash())
- fmt.Println(commit)
+ commit, err := r.Commit(ref.Hash())
+ fmt.Println(commit, err)
// List the tree from HEAD
// > git ls-tree -r HEAD
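
The example above now persists objects to an on-disk .git directory instead of an in-memory store, and surfaces the Commit error. A minimal sketch of the resulting flow, assuming the clone/fetch step elided above has already populated the repository; error handling is expanded here for illustration only:

package main

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4"
)

func main() {
	// Open or create a repository backed by the local ".git" directory.
	r, err := git.NewFilesystemRepository(".git")
	if err != nil {
		panic(err)
	}

	// ... clone / fetch as in the original example ...

	// Resolve HEAD and load the commit it points to.
	ref, err := r.Head()
	if err != nil {
		panic(err)
	}

	commit, err := r.Commit(ref.Hash())
	fmt.Println(commit, err)
}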
diff --git a/fixtures/fixtures.go b/fixtures/fixtures.go
index 59cbd2a..8e1c0ed 100644
--- a/fixtures/fixtures.go
+++ b/fixtures/fixtures.go
@@ -17,7 +17,9 @@ var RootFolder = ""
const DataFolder = "data"
-var fixtures = []*Fixture{{
+var folders []string
+
+var fixtures = Fixtures{{
Tags: []string{"packfile", "ofs-delta", ".git"},
URL: "https://github.com/git-fixtures/basic",
Head: core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"),
@@ -25,10 +27,11 @@ var fixtures = []*Fixture{{
DotGitHash: core.NewHash("0a00a25543e6d732dbf4e8e9fec55c8e65fc4e8d"),
ObjectsCount: 31,
}, {
- Tags: []string{"packfile", "ref-delta"},
+ Tags: []string{"packfile", "ref-delta", ".git"},
URL: "https://github.com/git-fixtures/basic",
Head: core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"),
PackfileHash: core.NewHash("c544593473465e6315ad4182d04d366c4592b829"),
+ DotGitHash: core.NewHash("7cbde0ca02f13aedd5ec8b358ca17b1c0bf5ee64"),
ObjectsCount: 31,
}, {
Tags: []string{".git", "unpacked", "multi-packfile"},
@@ -51,27 +54,11 @@ func Basic() Fixtures {
}
func ByURL(url string) Fixtures {
- r := make(Fixtures, 0)
- for _, f := range fixtures {
- if f.URL == url {
- r = append(r, f)
- }
- }
-
- return r
+ return fixtures.ByURL(url)
}
func ByTag(tag string) Fixtures {
- r := make(Fixtures, 0)
- for _, f := range fixtures {
- for _, t := range f.Tags {
- if t == tag {
- r = append(r, f)
- }
- }
- }
-
- return r
+ return fixtures.ByTag(tag)
}
type Fixture struct {
@@ -83,6 +70,16 @@ type Fixture struct {
ObjectsCount int32
}
+func (f *Fixture) Is(tag string) bool {
+ for _, t := range f.Tags {
+ if t == tag {
+ return true
+ }
+ }
+
+ return false
+}
+
func (f *Fixture) Packfile() io.ReadSeeker {
fn := filepath.Join(RootFolder, DataFolder, fmt.Sprintf("pack-%s.pack", f.PackfileHash))
file, err := os.Open(fn)
@@ -109,6 +106,8 @@ func (f *Fixture) DotGit() fs.Filesystem {
if err != nil {
panic(err)
}
+
+ folders = append(folders, path)
return fs.NewOS(path)
}
@@ -125,14 +124,23 @@ func (g Fixtures) One() *Fixture {
return g[0]
}
-func (g Fixtures) ByTag(tag string) *Fixture {
+func (g Fixtures) ByTag(tag string) Fixtures {
+ r := make(Fixtures, 0)
for _, f := range g {
- for _, t := range f.Tags {
- if t == tag {
- return f
- }
+ if f.Is(tag) {
+ r = append(r, f)
}
}
- return nil
+ return r
+}
+func (g Fixtures) ByURL(url string) Fixtures {
+ r := make(Fixtures, 0)
+ for _, f := range g {
+ if f.URL == url {
+ r = append(r, f)
+ }
+ }
+
+ return r
}
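
With this change ByTag and ByURL return a filtered Fixtures collection instead of a single *Fixture, so call sites either chain .One() or iterate over the result. A small sketch of the new style, using only the fixtures API visible in this diff:

package main

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/fixtures"
)

func main() {
	// A single fixture, as the old ByTag used to return directly.
	f := fixtures.Basic().ByTag("ofs-delta").One()
	fmt.Println(f.URL)

	// Filters can now be chained, and the resulting collection iterated.
	for _, fix := range fixtures.ByTag(".git").ByTag("unpacked") {
		fmt.Println(fix.URL, fix.Is("packfile"))
	}
}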
diff --git a/formats/packfile/decoder.go b/formats/packfile/decoder.go
index 23a8e1a..c4b9182 100644
--- a/formats/packfile/decoder.go
+++ b/formats/packfile/decoder.go
@@ -43,8 +43,9 @@ type Decoder struct {
o core.ObjectStorage
tx core.TxObjectStorage
- offsets map[int64]core.Hash
- crcs map[core.Hash]uint32
+ offsetToHash map[int64]core.Hash
+ hashToOffset map[core.Hash]int64
+ crcs map[core.Hash]uint32
}
// NewDecoder returns a new Decoder that reads from r.
@@ -54,8 +55,9 @@ func NewDecoder(s *Scanner, o core.ObjectStorage) *Decoder {
o: o,
tx: o.Begin(),
- offsets: make(map[int64]core.Hash, 0),
- crcs: make(map[core.Hash]uint32, 0),
+ offsetToHash: make(map[int64]core.Hash, 0),
+ hashToOffset: make(map[core.Hash]int64, 0),
+ crcs: make(map[core.Hash]uint32, 0),
}
}
@@ -82,11 +84,7 @@ func (d *Decoder) doDecode() error {
return err
}
- if err := d.tx.Commit(); err != nil {
- return err
- }
-
- return nil
+ return d.tx.Commit()
}
func (d *Decoder) readObjects(count uint32) error {
@@ -126,7 +124,9 @@ func (d *Decoder) ReadObject() (core.Object, error) {
return obj, err
}
- d.remember(obj, h.Offset, crc)
+ hash := obj.Hash()
+ d.setOffset(hash, h.Offset)
+ d.setCRC(hash, crc)
if _, err := d.tx.Set(obj); err != nil {
return nil, err
@@ -194,34 +194,45 @@ func (d *Decoder) fillOFSDeltaObjectContent(obj core.Object, offset int64) (uint
return crc, ApplyDelta(obj, base, buf.Bytes())
}
-func (d *Decoder) remember(obj core.Object, offset int64, crc uint32) {
- h := obj.Hash()
+func (d *Decoder) setOffset(h core.Hash, offset int64) {
+ d.offsetToHash[offset] = h
+ d.hashToOffset[h] = offset
+}
- d.offsets[offset] = h
+func (d *Decoder) setCRC(h core.Hash, crc uint32) {
d.crcs[h] = crc
}
func (d *Decoder) recallByOffset(o int64) (core.Object, error) {
- h, ok := d.offsets[o]
- if ok {
- return d.recallByHash(h)
+ if h, ok := d.offsetToHash[o]; ok {
+ return d.tx.Get(core.AnyObject, h)
}
return d.ReadObjectAt(o)
}
func (d *Decoder) recallByHash(h core.Hash) (core.Object, error) {
- return d.tx.Get(core.AnyObject, h)
+ obj, err := d.tx.Get(core.AnyObject, h)
+ if err != core.ErrObjectNotFound {
+ return obj, err
+ }
+
+ if o, ok := d.hashToOffset[h]; ok {
+ return d.ReadObjectAt(o)
+ }
+
+ return nil, core.ErrObjectNotFound
+}
+
+// SetOffsets sets the offsets, required when using the ReadObjectAt method
+// without first decoding the full packfile
+func (d *Decoder) SetOffsets(offsets map[core.Hash]int64) {
+ d.hashToOffset = offsets
}
// Offsets returns the objects read offset
func (d *Decoder) Offsets() map[core.Hash]int64 {
- i := make(map[core.Hash]int64, len(d.offsets))
- for o, h := range d.offsets {
- i[h] = o
- }
-
- return i
+ return d.hashToOffset
}
// CRCs returns the CRC-32 for each object read
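
The decoder now tracks offsets in both directions (offset to hash and hash to offset), and recallByHash falls back to ReadObjectAt when the transaction does not yet hold the object. Together with SetOffsets this allows random access into a packfile without a full decode. A rough sketch using offsets loaded from an idx file, mirroring the test change below; the pack/idx paths and the offset value are hypothetical:

package main

import (
	"fmt"
	"os"

	"gopkg.in/src-d/go-git.v4/core"
	"gopkg.in/src-d/go-git.v4/formats/idxfile"
	"gopkg.in/src-d/go-git.v4/formats/packfile"
	"gopkg.in/src-d/go-git.v4/storage/memory"
)

func main() {
	pack, _ := os.Open("objects/pack/pack-example.pack") // hypothetical paths
	idxf, _ := os.Open("objects/pack/pack-example.idx")

	// Build the hash -> offset map from the idx file.
	idx := &idxfile.Idxfile{}
	if err := idxfile.NewDecoder(idxf).Decode(idx); err != nil {
		panic(err)
	}

	offsets := make(map[core.Hash]int64, len(idx.Entries))
	for _, e := range idx.Entries {
		offsets[e.Hash] = int64(e.Offset)
	}

	// Hand the offsets to the decoder so ReadObjectAt can resolve ref-delta
	// bases without decoding the whole packfile first.
	d := packfile.NewDecoder(packfile.NewScanner(pack), memory.NewStorage().ObjectStorage())
	d.SetOffsets(offsets)

	obj, err := d.ReadObjectAt(186) // offset of some object in the pack
	if err != nil {
		panic(err)
	}

	fmt.Println(obj.Hash().String())
}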
diff --git a/formats/packfile/decoder_test.go b/formats/packfile/decoder_test.go
index 7baab44..d85f3bf 100644
--- a/formats/packfile/decoder_test.go
+++ b/formats/packfile/decoder_test.go
@@ -1,10 +1,12 @@
package packfile
import (
+ "io"
"testing"
"gopkg.in/src-d/go-git.v4/core"
"gopkg.in/src-d/go-git.v4/fixtures"
+ "gopkg.in/src-d/go-git.v4/formats/idxfile"
"gopkg.in/src-d/go-git.v4/storage/memory"
. "gopkg.in/check.v1"
@@ -68,7 +70,7 @@ func (s *ReaderSuite) TestDecode(c *C) {
})
}
func (s *ReaderSuite) TestDecodeCRCs(c *C) {
- f := fixtures.Basic().ByTag("ofs-delta")
+ f := fixtures.Basic().ByTag("ofs-delta").One()
scanner := NewScanner(f.Packfile())
storage := memory.NewStorage()
@@ -86,18 +88,24 @@ func (s *ReaderSuite) TestDecodeCRCs(c *C) {
}
func (s *ReaderSuite) TestReadObjectAt(c *C) {
- f := fixtures.Basic().One()
+ fixtures.Basic().Test(c, func(f *fixtures.Fixture) {
+ scanner := NewScanner(f.Packfile())
+ storage := memory.NewStorage()
- scanner := NewScanner(f.Packfile())
- storage := memory.NewStorage()
+ d := NewDecoder(scanner, storage.ObjectStorage())
- d := NewDecoder(scanner, storage.ObjectStorage())
+ // when the packfile is ref-delta based, the offsets are required
+ if f.Is("ref-delta") {
+ offsets := getOffsetsFromIdx(f.Idx())
+ d.SetOffsets(offsets)
+ }
- // the objects at reference 186, is a delta, so should be recall, without
- // being read before.
- obj, err := d.ReadObjectAt(186)
- c.Assert(err, IsNil)
- c.Assert(obj.Hash().String(), Equals, "6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
+ // the object at offset 186 is a delta, so it should be recalled
+ // without having been read before.
+ obj, err := d.ReadObjectAt(186)
+ c.Assert(err, IsNil)
+ c.Assert(obj.Hash().String(), Equals, "6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
+ })
}
func AssertObjects(c *C, s *memory.Storage, expects []string) {
@@ -110,3 +118,18 @@ func AssertObjects(c *C, s *memory.Storage, expects []string) {
c.Assert(obt.Hash().String(), Equals, exp)
}
}
+
+func getOffsetsFromIdx(r io.Reader) map[core.Hash]int64 {
+ idx := &idxfile.Idxfile{}
+ err := idxfile.NewDecoder(r).Decode(idx)
+ if err != nil {
+ panic(err)
+ }
+
+ offsets := make(map[core.Hash]int64)
+ for _, e := range idx.Entries {
+ offsets[e.Hash] = int64(e.Offset)
+ }
+
+ return offsets
+}
diff --git a/formats/packfile/parser.go b/formats/packfile/scanner.go
index 6fa2f42..86092a1 100644
--- a/formats/packfile/parser.go
+++ b/formats/packfile/scanner.go
@@ -41,8 +41,6 @@ type ObjectHeader struct {
OffsetReference int64
}
-// A Parser is a collection of functions to read and process data form a packfile.
-// Values from this type are not zero-value safe. See the NewParser function bellow.
type Scanner struct {
r reader
crc hash.Hash32
@@ -53,18 +51,22 @@ type Scanner struct {
version, objects uint32
}
-// NewParser returns a new Parser that reads from the packfile represented by r.
-func NewScannerFromReader(r io.Reader) *Scanner {
- s := &trackableReader{Reader: r}
- return NewScanner(s)
-}
+// NewScanner returns a new Scanner based on a reader; if the given reader
+// implements io.ReadSeeker, the Scanner will also be seekable
+func NewScanner(r io.Reader) *Scanner {
+ seeker, ok := r.(io.ReadSeeker)
+ if !ok {
+ seeker = &trackableReader{Reader: r}
+ }
-func NewScanner(r io.ReadSeeker) *Scanner {
crc := crc32.NewIEEE()
- seeker := newByteReadSeeker(r)
- tee := &teeReader{seeker, crc}
-
- return &Scanner{r: tee, crc: crc}
+ return &Scanner{
+ r: &teeReader{
+ newByteReadSeeker(seeker),
+ crc,
+ },
+ crc: crc,
+ }
}
// Header reads the whole packfile header (signature, version and object count).
@@ -265,6 +267,8 @@ func (s *Scanner) readLength(first byte) (int64, error) {
return length, nil
}
+// NextObject writes the content of the next object into the writer, returning
+// the number of bytes written, the CRC32 of the content and an error, if any
func (s *Scanner) NextObject(w io.Writer) (written int64, crc32 uint32, err error) {
defer s.crc.Reset()
@@ -308,6 +312,7 @@ func (s *Scanner) Seek(offset int64) (previous int64, err error) {
return previous, err
}
+// Checksum returns the checksum of the packfile
func (s *Scanner) Checksum() (core.Hash, error) {
err := s.discardObjectIfNeeded()
if err != nil {
@@ -383,6 +388,7 @@ func (s *Scanner) readByte() (byte, error) {
return b, err
}
+// Close reads the reader until io.EOF
func (s *Scanner) Close() error {
_, err := io.Copy(ioutil.Discard, s.r)
return err
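
NewScannerFromReader is gone: NewScanner now accepts any io.Reader and only wraps it in the internal trackable reader when it does not already implement io.ReadSeeker, which is why remote.go can hand it a network stream directly. A minimal sketch of both cases, with a hypothetical pack path:

package main

import (
	"bufio"
	"os"

	"gopkg.in/src-d/go-git.v4/formats/packfile"
)

func main() {
	// Seekable: *os.File implements io.ReadSeeker, so Seek and random access
	// work on the resulting scanner.
	f, err := os.Open("objects/pack/pack-example.pack") // hypothetical path
	if err != nil {
		panic(err)
	}
	seekable := packfile.NewScanner(f)

	// Forward-only: a bufio.Reader stands in for a network stream and gets
	// wrapped internally, so only sequential reading is possible.
	streamed := packfile.NewScanner(bufio.NewReader(f))

	_, _ = seekable, streamed
}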
diff --git a/formats/packfile/parser_test.go b/formats/packfile/scanner_test.go
index 2ff2887..6161fdb 100644
--- a/formats/packfile/parser_test.go
+++ b/formats/packfile/scanner_test.go
@@ -49,7 +49,7 @@ func (s *ScannerSuite) TestNextObjectHeaderOFSDelta(c *C) {
}
func (s *ScannerSuite) testNextObjectHeader(c *C, tag string, expected []ObjectHeader) {
- r := fixtures.Basic().ByTag(tag).Packfile()
+ r := fixtures.Basic().ByTag(tag).One().Packfile()
p := NewScanner(r)
_, objects, err := p.Header()
@@ -72,7 +72,7 @@ func (s *ScannerSuite) testNextObjectHeader(c *C, tag string, expected []ObjectH
}
func (s *ScannerSuite) TestNextObjectHeaderWithOutReadObject(c *C) {
- f := fixtures.Basic().ByTag("ref-delta")
+ f := fixtures.Basic().ByTag("ref-delta").One()
r := f.Packfile()
p := NewScanner(r)
diff --git a/remote.go b/remote.go
index c654422..8e2d8a8 100644
--- a/remote.go
+++ b/remote.go
@@ -173,7 +173,7 @@ func (r *Remote) updateObjectStorage(reader io.Reader) error {
return err
}
- stream := packfile.NewScannerFromReader(reader)
+ stream := packfile.NewScanner(reader)
d := packfile.NewDecoder(stream, s)
_, err := d.Decode()
return err
diff --git a/storage/filesystem/internal/dotgit/dotgit.go b/storage/filesystem/internal/dotgit/dotgit.go
index e22ed58..cacda68 100644
--- a/storage/filesystem/internal/dotgit/dotgit.go
+++ b/storage/filesystem/internal/dotgit/dotgit.go
@@ -236,6 +236,7 @@ type PackWriter struct {
checksum core.Hash
index idxfile.Idxfile
result chan error
+ Notify func(h core.Hash, i idxfile.Idxfile)
}
func newPackWrite(fs fs.Filesystem) (*PackWriter, error) {
@@ -265,7 +266,7 @@ func newPackWrite(fs fs.Filesystem) (*PackWriter, error) {
func (w *PackWriter) buildIndex() {
defer w.sr.Close()
o := memory.NewStorage().ObjectStorage()
- s := packfile.NewScannerFromReader(w.sr)
+ s := packfile.NewScanner(w.sr)
d := packfile.NewDecoder(s, o)
checksum, err := d.Decode()
@@ -307,7 +308,15 @@ func (w *PackWriter) Close() error {
return err
}
- return w.save()
+ if err := w.save(); err != nil {
+ return err
+ }
+
+ if w.Notify != nil {
+ w.Notify(w.checksum, w.index)
+ }
+
+ return nil
}
func (w *PackWriter) save() error {
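
PackWriter.Close now fires an optional Notify callback with the pack checksum and the idxfile built while the pack was written; storage/filesystem/object.go uses it below to keep its in-memory offset index current. A sketch of the hook, written as if it lived inside the storage/filesystem package (dotgit is an internal package); the helper name and the *dotgit.DotGit receiver type are assumptions based on how the package is used elsewhere in this diff:

package filesystem

import (
	"fmt"
	"io"

	"gopkg.in/src-d/go-git.v4/core"
	"gopkg.in/src-d/go-git.v4/formats/idxfile"
	"gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit"
)

// writePack is a hypothetical helper: it streams packfile data into a new
// object pack and, once Close has built the index, Notify reports the pack
// checksum and the number of indexed objects.
func writePack(d *dotgit.DotGit, packfileData io.Reader) error {
	w, err := d.NewObjectPack()
	if err != nil {
		return err
	}

	w.Notify = func(h core.Hash, idx idxfile.Idxfile) {
		fmt.Println("pack", h.String(), "written with", len(idx.Entries), "objects")
	}

	if _, err := io.Copy(w, packfileData); err != nil {
		return err
	}

	return w.Close()
}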
diff --git a/storage/filesystem/object.go b/storage/filesystem/object.go
index 0b82524..dc21d0b 100644
--- a/storage/filesystem/object.go
+++ b/storage/filesystem/object.go
@@ -67,7 +67,19 @@ func (s *ObjectStorage) NewObject() core.Object {
// Writer method not supported on Memory storage
func (s *ObjectStorage) Writer() (io.WriteCloser, error) {
- return s.dir.NewObjectPack()
+ w, err := s.dir.NewObjectPack()
+ if err != nil {
+ return nil, err
+ }
+
+ w.Notify = func(h core.Hash, idx idxfile.Idxfile) {
+ s.index[h] = make(index)
+ for _, e := range idx.Entries {
+ s.index[h][e.Hash] = int64(e.Offset)
+ }
+ }
+
+ return w, nil
}
// Set adds a new object to the storage. As this functionality is not
@@ -150,6 +162,7 @@ func (s *ObjectStorage) getFromPackfile(h core.Hash) (core.Object, error) {
p := packfile.NewScanner(f)
d := packfile.NewDecoder(p, memory.NewStorage().ObjectStorage())
+ d.SetOffsets(s.index[pack])
return d.ReadObjectAt(offset)
}
diff --git a/storage/filesystem/object_test.go b/storage/filesystem/object_test.go
index 14c77e4..07fa646 100644
--- a/storage/filesystem/object_test.go
+++ b/storage/filesystem/object_test.go
@@ -17,7 +17,7 @@ func (s *FsSuite) SetUpSuite(c *C) {
}
func (s *FsSuite) TestGetFromObjectFile(c *C) {
- fs := fixtures.ByTag(".git").ByTag("unpacked").DotGit()
+ fs := fixtures.ByTag(".git").ByTag("unpacked").One().DotGit()
o, err := newObjectStorage(dotgit.New(fs))
c.Assert(err, IsNil)
@@ -28,18 +28,20 @@ func (s *FsSuite) TestGetFromObjectFile(c *C) {
}
func (s *FsSuite) TestGetFromPackfile(c *C) {
- fs := fixtures.Basic().ByTag(".git").DotGit()
- o, err := newObjectStorage(dotgit.New(fs))
- c.Assert(err, IsNil)
+ fixtures.Basic().ByTag(".git").Test(c, func(f *fixtures.Fixture) {
+ fs := f.DotGit()
+ o, err := newObjectStorage(dotgit.New(fs))
+ c.Assert(err, IsNil)
- expected := core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- obj, err := o.Get(core.AnyObject, expected)
- c.Assert(err, IsNil)
- c.Assert(obj.Hash(), Equals, expected)
+ expected := core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
+ obj, err := o.Get(core.AnyObject, expected)
+ c.Assert(err, IsNil)
+ c.Assert(obj.Hash(), Equals, expected)
+ })
}
func (s *FsSuite) TestGetFromPackfileMultiplePackfiles(c *C) {
- fs := fixtures.ByTag(".git").ByTag("multi-packfile").DotGit()
+ fs := fixtures.ByTag(".git").ByTag("multi-packfile").One().DotGit()
o, err := newObjectStorage(dotgit.New(fs))
c.Assert(err, IsNil)