path: root/formats/packfile
author    Máximo Cuadros <mcuadros@gmail.com>    2016-09-08 23:58:41 +0200
committer Máximo Cuadros <mcuadros@gmail.com>    2016-09-08 23:58:41 +0200
commit    3b1baea2dd9353f42b3a9d93f6bc92ecbe9f4f01 (patch)
tree      d255b7e6edbcbaba1f98cf0c666a82c0f9750019 /formats/packfile
parent    2293a3dbda0a96f5cbac851bb73e1e675417e4f3 (diff)
download  go-git-3b1baea2dd9353f42b3a9d93f6bc92ecbe9f4f01.tar.gz
format: packfile based on ObjectStorage and CRC32 calculation
Diffstat (limited to 'formats/packfile')
-rw-r--r--  formats/packfile/decoder.go       161
-rw-r--r--  formats/packfile/decoder_test.go  139
-rw-r--r--  formats/packfile/parser.go         87
-rw-r--r--  formats/packfile/parser_test.go   553
4 files changed, 294 insertions, 646 deletions
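
For orientation, a minimal usage sketch of the Decoder API as it looks after this commit. The packfile path and the formats/packfile import path are assumptions inferred from the repository layout shown below, not part of the change itself:

package main

import (
	"fmt"
	"os"

	"gopkg.in/src-d/go-git.v4/formats/packfile"
	"gopkg.in/src-d/go-git.v4/storage/memory"
)

func main() {
	// Hypothetical packfile on disk; any *os.File satisfies io.ReadSeeker.
	f, err := os.Open("objects.pack")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Decode every object into an in-memory storage, as the tests below do.
	storage := memory.NewStorage()
	d := packfile.NewDecoder(packfile.NewScanner(f), storage.ObjectStorage())

	checksum, err := d.Decode()
	if err != nil {
		panic(err)
	}
	fmt.Println("pack checksum:", checksum)

	// New in this commit: per-object offsets and CRC-32 checksums.
	for hash, offset := range d.Offsets() {
		fmt.Printf("%s offset=%d crc32=%08x\n", hash, offset, d.CRCs()[hash])
	}
}
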
diff --git a/formats/packfile/decoder.go b/formats/packfile/decoder.go
index 18ec6b9..92c42af 100644
--- a/formats/packfile/decoder.go
+++ b/formats/packfile/decoder.go
@@ -39,19 +39,23 @@ var (
// Decoder reads and decodes packfiles from an input stream.
type Decoder struct {
- scanner *Scanner
- storage core.ObjectStorage
- offsetToObject map[int64]core.Object
- hashToOffset map[core.Hash]int64
+ s *Scanner
+ o core.ObjectStorage
+ tx core.TxObjectStorage
+
+ offsets map[int64]core.Hash
+ crcs map[core.Hash]uint32
}
// NewDecoder returns a new Decoder that reads from r.
-func NewDecoder(p *Scanner, s core.ObjectStorage) *Decoder {
+func NewDecoder(s *Scanner, o core.ObjectStorage) *Decoder {
return &Decoder{
- scanner: p,
- storage: s,
- offsetToObject: make(map[int64]core.Object, 0),
- hashToOffset: make(map[core.Hash]int64, 0),
+ s: s,
+ o: o,
+ tx: o.Begin(),
+
+ offsets: make(map[int64]core.Hash, 0),
+ crcs: make(map[core.Hash]uint32, 0),
}
}
@@ -61,48 +65,38 @@ func (d *Decoder) Decode() (checksum core.Hash, err error) {
return core.ZeroHash, err
}
- return d.scanner.Checksum()
+ return d.s.Checksum()
}
func (d *Decoder) doDecode() error {
- _, count, err := d.scanner.Header()
+ _, count, err := d.s.Header()
if err != nil {
return err
}
- if d.storage == nil {
- return d.readObjects(count, nil)
- }
-
- tx := d.storage.Begin()
- if err := d.readObjects(count, tx); err != nil {
- if err := tx.Rollback(); err != nil {
+ if err := d.readObjects(count); err != nil {
+ if err := d.tx.Rollback(); err != nil {
return nil
}
return err
}
- if err := tx.Commit(); err != nil {
+ if err := d.tx.Commit(); err != nil {
return err
}
return nil
}
-func (d *Decoder) readObjects(count uint32, tx core.TxObjectStorage) error {
+func (d *Decoder) readObjects(count uint32) error {
for i := 0; i < int(count); i++ {
obj, err := d.readObject()
if err != nil {
return err
}
- if tx == nil {
- continue
- }
-
- _, err = tx.Set(obj)
- if err != nil {
+ if _, err := d.tx.Set(obj); err != nil {
return err
}
}
@@ -111,22 +105,22 @@ func (d *Decoder) readObjects(count uint32, tx core.TxObjectStorage) error {
}
func (d *Decoder) readObject() (core.Object, error) {
- h, err := d.scanner.NextObjectHeader()
+ h, err := d.s.NextObjectHeader()
if err != nil {
return nil, err
}
- obj := d.newObject()
+ obj := d.o.NewObject()
obj.SetSize(h.Length)
obj.SetType(h.Type)
-
+ var crc uint32
switch h.Type {
case core.CommitObject, core.TreeObject, core.BlobObject, core.TagObject:
- err = d.fillRegularObjectContent(obj)
+ crc, err = d.fillRegularObjectContent(obj)
case core.REFDeltaObject:
- err = d.fillREFDeltaObjectContent(obj, h.Reference)
+ crc, err = d.fillREFDeltaObjectContent(obj, h.Reference)
case core.OFSDeltaObject:
- err = d.fillOFSDeltaObjectContent(obj, h.OffsetReference)
+ crc, err = d.fillOFSDeltaObjectContent(obj, h.OffsetReference)
default:
err = ErrInvalidObject.AddDetails("type %q", h.Type)
}
@@ -135,109 +129,81 @@ func (d *Decoder) readObject() (core.Object, error) {
return obj, err
}
- d.remember(h.Offset, obj)
+ d.remember(obj, h.Offset, crc)
return obj, nil
}
-func (d *Decoder) newObject() core.Object {
- if d.storage == nil {
- return &core.MemoryObject{}
- }
-
- return d.storage.NewObject()
-}
-
-func (d *Decoder) fillRegularObjectContent(obj core.Object) error {
+func (d *Decoder) fillRegularObjectContent(obj core.Object) (uint32, error) {
w, err := obj.Writer()
if err != nil {
- return err
+ return 0, err
}
- _, err = d.scanner.NextObject(w)
- return err
+ _, crc, err := d.s.NextObject(w)
+ return crc, err
}
-func (d *Decoder) fillREFDeltaObjectContent(obj core.Object, ref core.Hash) error {
+func (d *Decoder) fillREFDeltaObjectContent(obj core.Object, ref core.Hash) (uint32, error) {
buf := bytes.NewBuffer(nil)
- if _, err := d.scanner.NextObject(buf); err != nil {
- return err
+ _, crc, err := d.s.NextObject(buf)
+ if err != nil {
+ return 0, err
}
base, err := d.recallByHash(ref)
if err != nil {
- return err
+ return 0, err
}
obj.SetType(base.Type())
- return ApplyDelta(obj, base, buf.Bytes())
+ return crc, ApplyDelta(obj, base, buf.Bytes())
}
-func (d *Decoder) fillOFSDeltaObjectContent(obj core.Object, offset int64) error {
+func (d *Decoder) fillOFSDeltaObjectContent(obj core.Object, offset int64) (uint32, error) {
buf := bytes.NewBuffer(nil)
- if _, err := d.scanner.NextObject(buf); err != nil {
- return err
+ _, crc, err := d.s.NextObject(buf)
+ if err != nil {
+ return 0, err
}
base, err := d.recallByOffset(offset)
if err != nil {
- return err
+ return 0, err
}
obj.SetType(base.Type())
- return ApplyDelta(obj, base, buf.Bytes())
+ return crc, ApplyDelta(obj, base, buf.Bytes())
}
-// remember stores the offset of the object and its hash and the object itself.
-// If a seeker was not provided to the decoder, the objects are stored in memory
-func (d *Decoder) remember(o int64, obj core.Object) {
+func (d *Decoder) remember(obj core.Object, offset int64, crc uint32) {
h := obj.Hash()
- d.hashToOffset[h] = o
- if !d.scanner.IsSeekable() {
- d.offsetToObject[o] = obj
- }
+ d.offsets[offset] = h
+ d.crcs[h] = crc
}
-// recallByHash returns the object for a given hash by looking for it again in
-// the io.ReadeSeerker.
-func (d *Decoder) recallByHash(h core.Hash) (core.Object, error) {
- o, ok := d.hashToOffset[h]
- if !ok {
- return nil, ErrCannotRecall.AddDetails("hash not found: %s", h)
- }
-
- return d.recallByOffset(o)
-}
-
-// recallByOffset returns the object for a given offset by looking for it again in
-// the io.ReadeSeerker. For efficiency reasons, this method always find objects by
-// offset, even if they have not been remembered or if they have been forgetted.
func (d *Decoder) recallByOffset(o int64) (core.Object, error) {
- obj, ok := d.offsetToObject[o]
+ h, ok := d.offsets[o]
if ok {
- return obj, nil
+ return d.recallByHash(h)
}
- if !ok && !d.scanner.IsSeekable() {
- return nil, ErrCannotRecall.AddDetails("no object found at offset %d", o)
- }
+ return nil, ErrCannotRecall.AddDetails("no object found at offset %d", o)
+}
- return d.ReadObjectAt(o)
+func (d *Decoder) recallByHash(h core.Hash) (core.Object, error) {
+ return d.tx.Get(core.AnyObject, h)
}
// ReadObjectAt reads an object at the given location
func (d *Decoder) ReadObjectAt(offset int64) (core.Object, error) {
- if !d.scanner.IsSeekable() {
- return nil, ErrNotSeeker
- }
-
- beforeJump, err := d.scanner.Seek(offset)
+ beforeJump, err := d.s.Seek(offset)
if err != nil {
return nil, err
}
defer func() {
- _, seekErr := d.scanner.Seek(beforeJump)
+ _, seekErr := d.s.Seek(beforeJump)
if err == nil {
err = seekErr
}
@@ -246,14 +212,23 @@ func (d *Decoder) ReadObjectAt(offset int64) (core.Object, error) {
return d.readObject()
}
-// Index returns an index of the objects read by hash and the position where
-// was read
-func (d *Decoder) Index() map[core.Hash]int64 {
- return d.hashToOffset
+// Offsets returns the offset of each object read, indexed by hash
+func (d *Decoder) Offsets() map[core.Hash]int64 {
+ i := make(map[core.Hash]int64, len(d.offsets))
+ for o, h := range d.offsets {
+ i[h] = o
+ }
+
+ return i
+}
+
+// CRCs returns the CRC-32 checksum of each object read, indexed by hash
+func (d *Decoder) CRCs() map[core.Hash]uint32 {
+ return d.crcs
}
// Close closes the Scanner; usually this means that the whole reader has been
// read and discarded
func (d *Decoder) Close() error {
- return d.scanner.Close()
+ return d.s.Close()
}
diff --git a/formats/packfile/decoder_test.go b/formats/packfile/decoder_test.go
index e229f50..23fa93a 100644
--- a/formats/packfile/decoder_test.go
+++ b/formats/packfile/decoder_test.go
@@ -1,12 +1,10 @@
package packfile
import (
- "bytes"
- "encoding/base64"
- "os"
"testing"
"gopkg.in/src-d/go-git.v4/core"
+ "gopkg.in/src-d/go-git.v4/fixtures"
"gopkg.in/src-d/go-git.v4/storage/memory"
. "gopkg.in/check.v1"
@@ -18,97 +16,74 @@ type ReaderSuite struct{}
var _ = Suite(&ReaderSuite{})
-var packFileWithEmptyObjects = "UEFDSwAAAAIAAAALnw54nKXMQWoDMQxA0b1PoX2hSLIm44FSAlmXnEG2NYlhXAfHgdLb5Cy9WAM5Qpb/Lf7oZqArUpakyYtQjCoxZ5lmWXwwyuzJbHqAuYt2+x6QoyCyhYCKIa67lGameSLWvPh5JU0hsCg7vY1z6/D1d/8ptcHhprm3Kxz7KL/wUdOz96eqZXtPrX4CCeOOPU8Eb0iI7qG1jGGvXdxaNoPs/gHeNkp8lA94nKXMQUpDMRCA4X1OMXtBZpI3L3kiRXAtPcMkmWjgxZSYQultPEsv1oJHcPl/i38OVRC0IXF0lshrJorZEcpKmTEJYbA+B3aFzEmGfk9gpqJEsmnZNutXF71i1IURU/G0bsWWwJ6NnOdXH/Bx+73U1uH9LHn0HziOWa/w2tJfv302qftz6u0AtFh0wQdmeEJCNA9tdU7938WUuivEF5CczR11ZEsNnw54nKWMUQoCIRRF/13F+w/ijY6jQkTQd7SGpz5LyAxzINpNa2ljTbSEPu/hnNsbM4TJTzqyt561GdUUmJKT6K2MeiCVgnZWoY/iRo2vHVS0URrUS+e+dkqIEp11HMhh9IaUkRM6QXM/1waH9+uRS4X9TLHVOxxbz0/YlPDbu1OhfFmHWrYwjBKVNVaNsMIBUSy05N75vxeR8oXBiw8GoErCnwt4nKXMzQkCMRBA4XuqmLsgM2M2ZkAWwbNYQ341sCEQsyB2Yy02pmAJHt93eKOnBFpMNJqtl5CFxVIMomViomQSEWP2JrN3yq3j1jqc369HqQ1Oq4u93eHSR3nCoYZfH6/VlWUbWp2BNOPO7i1OsEFCVF+tZYz030XlsiRw6gPZ0jxaqwV4nDM0MDAzMVFIZHg299HsTRevOXt3a64rj7px6ElP8ERDiGQSQ2uoXe8RrcodS5on+J4/u8HjD4NDKFQyRS8tPx+rbgDt3yiEMHicAwAAAAABPnicS0wEAa4kMOACACTjBKdkZXici7aaYAUAA3gBYKoDeJwzNDAwMzFRSGR4NvfR7E0Xrzl7d2uuK4+6cehJT/BEQ4hkEsOELYFJvS2eX47UJdVttFQrenrmzQwA13MaiDd4nEtMBAEuAApMAlGtAXicMzQwMDMxUUhkeDb30exNF685e3drriuPunHoSU/wRACvkA258N/i8hVXx9CiAZzvFXNIhCuSFmE="
-
-func (s *ReaderSuite) TestReadPackfile(c *C) {
- data, _ := base64.StdEncoding.DecodeString(packFileWithEmptyObjects)
- f := bytes.NewReader(data)
- sto := memory.NewStorage()
- d := NewDecoder(NewScanner(f), sto.ObjectStorage())
-
- _, err := d.Decode()
- c.Assert(err, IsNil)
-
- AssertObjects(c, sto, []string{
- "778c85ff95b5514fea0ba4c7b6a029d32e2c3b96",
- "db4002e880a08bf6cc7217512ad937f1ac8824a2",
- "551fe11a9ef992763b7e0be4500cf7169f2f8575",
- "3d8d2705c6b936ceff0020989eca90db7a372609",
- "af01d4cac3441bba4bdd4574938e1d231ee5d45e",
- "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
- "85553e8dc42a79b8a483904dcfcdb048fc004055",
- "a028c5b32117ed11bd310a61d50ca10827d853f1",
- "c6b65deb8be57436ceaf920b82d51a3fc59830bd",
- "90b451628d8449f4c47e627eb1392672e5ccec98",
- "496d6428b9cf92981dc9495211e6e1120fb6f2ba",
- })
+func (s *ReaderSuite) SetUpSuite(c *C) {
+ fixtures.RootFolder = "../../fixtures"
}
-func (s *ReaderSuite) TestDecodeOFSDelta(c *C) {
- s.testDecode(c, "fixtures/git-fixture.ofs-delta", true)
-}
+func (s *ReaderSuite) TestDecode(c *C) {
+ fixtures.Basic().Test(c, func(f *fixtures.Fixture) {
+ scanner := NewScanner(f.Packfile())
+ storage := memory.NewStorage()
-func (s *ReaderSuite) TestDecodeOFSDeltaNoSeekable(c *C) {
- s.testDecode(c, "fixtures/git-fixture.ofs-delta", false)
-}
+ d := NewDecoder(scanner, storage.ObjectStorage())
-func (s *ReaderSuite) TestDecodeREFDelta(c *C) {
- s.testDecode(c, "fixtures/git-fixture.ref-delta", true)
-}
+ ch, err := d.Decode()
+ c.Assert(err, IsNil)
+ c.Assert(ch, Equals, f.PackfileHash)
+
+ AssertObjects(c, storage, []string{
+ "918c48b83bd081e863dbe1b80f8998f058cd8294",
+ "af2d6a6954d532f8ffb47615169c8fdf9d383a1a",
+ "1669dce138d9b841a518c64b10914d88f5e488ea",
+ "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69",
+ "b8e471f58bcbca63b07bda20e428190409c2db47",
+ "35e85108805c84807bc66a02d91535e1e24b38b9",
+ "b029517f6300c2da0f4b651b8642506cd6aaf45d",
+ "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88",
+ "d3ff53e0564a9f87d8e84b6e28e5060e517008aa",
+ "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f",
+ "d5c0f4ab811897cadf03aec358ae60d21f91c50d",
+ "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9",
+ "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b",
+ "9dea2395f5403188298c1dabe8bdafe562c491e3",
+ "586af567d0bb5e771e49bdd9434f5e0fb76d25fa",
+ "9a48f23120e880dfbe41f7c9b7b708e9ee62a492",
+ "5a877e6a906a2743ad6e45d99c1793642aaf8eda",
+ "c8f1d8c61f9da76f4cb49fd86322b6e685dba956",
+ "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
+ "a39771a7651f97faf5c72e08224d857fc35133db",
+ "880cd14280f4b9b6ed3986d6671f907d7cc2a198",
+ "fb72698cab7617ac416264415f13224dfd7a165e",
+ "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd",
+ "eba74343e2f15d62adedfd8c883ee0262b5c8021",
+ "c2d30fa8ef288618f65f6eed6e168e0d514886f4",
+ "8dcef98b1d52143e1e2dbc458ffe38f925786bf2",
+ "aa9b383c260e1d05fbbf6b30a02914555e20c725",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5",
+ "dbd3641b371024f44d0e469a9c8f5457b0660de1",
+ "e8d3ffab552895c19b9fcf7aa264d277cde33881",
+ "7e59600739c96546163833214c36459e324bad0a",
+ })
-func (s *ReaderSuite) TestDecodeREFDeltaNoSeekable(c *C) {
- s.testDecode(c, "fixtures/git-fixture.ref-delta", false)
+ })
}
+func (s *ReaderSuite) TestDecodeCRCs(c *C) {
+ f := fixtures.Basic().ByTag("ofs-delta")
+
+ scanner := NewScanner(f.Packfile())
+ storage := memory.NewStorage()
-func (s *ReaderSuite) testDecode(c *C, file string, seekable bool) {
- f, err := os.Open(file)
+ d := NewDecoder(scanner, storage.ObjectStorage())
+ _, err := d.Decode()
c.Assert(err, IsNil)
- scanner := NewScanner(f)
- if !seekable {
- scanner = NewScannerFromReader(f)
+ var sum uint64
+ for _, crc := range d.CRCs() {
+ sum += uint64(crc)
}
- s.doTestDecodeWithScanner(c, scanner)
-}
-
-func (s *ReaderSuite) doTestDecodeWithScanner(c *C, scanner *Scanner) {
- sto := memory.NewStorage()
- d := NewDecoder(scanner, sto.ObjectStorage())
+ c.Assert(int(sum), Equals, 78022211966)
- _, err := d.Decode()
- c.Assert(err, IsNil)
-
- AssertObjects(c, sto, []string{
- "918c48b83bd081e863dbe1b80f8998f058cd8294",
- "af2d6a6954d532f8ffb47615169c8fdf9d383a1a",
- "1669dce138d9b841a518c64b10914d88f5e488ea",
- "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69",
- "b8e471f58bcbca63b07bda20e428190409c2db47",
- "35e85108805c84807bc66a02d91535e1e24b38b9",
- "b029517f6300c2da0f4b651b8642506cd6aaf45d",
- "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88",
- "d3ff53e0564a9f87d8e84b6e28e5060e517008aa",
- "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f",
- "d5c0f4ab811897cadf03aec358ae60d21f91c50d",
- "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9",
- "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b",
- "9dea2395f5403188298c1dabe8bdafe562c491e3",
- "586af567d0bb5e771e49bdd9434f5e0fb76d25fa",
- "9a48f23120e880dfbe41f7c9b7b708e9ee62a492",
- "5a877e6a906a2743ad6e45d99c1793642aaf8eda",
- "c8f1d8c61f9da76f4cb49fd86322b6e685dba956",
- "a8d315b2b1c615d43042c3a62402b8a54288cf5c",
- "a39771a7651f97faf5c72e08224d857fc35133db",
- "880cd14280f4b9b6ed3986d6671f907d7cc2a198",
- "fb72698cab7617ac416264415f13224dfd7a165e",
- "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd",
- "eba74343e2f15d62adedfd8c883ee0262b5c8021",
- "c2d30fa8ef288618f65f6eed6e168e0d514886f4",
- "8dcef98b1d52143e1e2dbc458ffe38f925786bf2",
- "aa9b383c260e1d05fbbf6b30a02914555e20c725",
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5",
- })
}
func AssertObjects(c *C, s *memory.Storage, expects []string) {
diff --git a/formats/packfile/parser.go b/formats/packfile/parser.go
index 346b4c3..cbd8d6c 100644
--- a/formats/packfile/parser.go
+++ b/formats/packfile/parser.go
@@ -6,6 +6,8 @@ import (
"compress/zlib"
"encoding/binary"
"fmt"
+ "hash"
+ "hash/crc32"
"io"
"io/ioutil"
@@ -42,7 +44,8 @@ type ObjectHeader struct {
// A Parser is a collection of functions to read and process data from a packfile.
// Values from this type are not zero-value safe. See the NewParser function below.
type Scanner struct {
- r *bufferedSeeker
+ r reader
+ crc hash.Hash32
// pendingObject is used to detect if an object has been read, or still
// is waiting to be read
@@ -56,8 +59,11 @@ func NewScannerFromReader(r io.Reader) *Scanner {
}
func NewScanner(r io.ReadSeeker) *Scanner {
- s := newByteReadSeeker(r)
- return &Scanner{r: s}
+ crc := crc32.NewIEEE()
+ seeker := newByteReadSeeker(r)
+ tee := &teeReader{seeker, crc}
+
+ return &Scanner{r: tee, crc: crc}
}
// Header reads the whole packfile header (signature, version and object count).
@@ -139,6 +145,8 @@ func (s *Scanner) NextObjectHeader() (*ObjectHeader, error) {
return nil, err
}
+ s.crc.Reset()
+
h := &ObjectHeader{}
s.pendingObject = h
@@ -178,7 +186,7 @@ func (s *Scanner) discardObjectIfNeeded() error {
}
h := s.pendingObject
- n, err := s.NextObject(ioutil.Discard)
+ n, _, err := s.NextObject(ioutil.Discard)
if err != nil {
return err
}
@@ -209,7 +217,7 @@ func (s *Scanner) readObjectTypeAndLength() (core.ObjectType, int64, error) {
func (s *Scanner) readType() (core.ObjectType, byte, error) {
var c byte
var err error
- if c, err = s.r.ReadByte(); err != nil {
+ if c, err = s.readByte(); err != nil {
return core.ObjectType(0), 0, err
}
@@ -227,7 +235,7 @@ func (s *Scanner) readLength(first byte) (int64, error) {
shift := firstLengthBits
var err error
for moreBytesInLength(c) {
- if c, err = s.r.ReadByte(); err != nil {
+ if c, err = s.readByte(); err != nil {
return 0, err
}
@@ -238,9 +246,13 @@ func (s *Scanner) readLength(first byte) (int64, error) {
return length, nil
}
-func (s *Scanner) NextObject(w io.Writer) (written int64, err error) {
+func (s *Scanner) NextObject(w io.Writer) (written int64, crc32 uint32, err error) {
+ defer s.crc.Reset()
+
s.pendingObject = nil
- return s.copyObject(w)
+ written, err = s.copyObject(w)
+ crc32 = s.crc.Sum32()
+ return
}
// ReadRegularObject reads and writes a non-deltified object
@@ -248,9 +260,7 @@ func (s *Scanner) NextObject(w io.Writer) (written int64, err error) {
func (s *Scanner) copyObject(w io.Writer) (int64, error) {
zr, err := zlib.NewReader(s.r)
if err != nil {
- if err != zlib.ErrHeader {
- return -1, fmt.Errorf("zlib reading error: %s", err)
- }
+ return -1, fmt.Errorf("zlib reading error: %s", err)
}
defer func() {
@@ -263,11 +273,6 @@ func (s *Scanner) copyObject(w io.Writer) (int64, error) {
return io.Copy(w, zr)
}
-func (s *Scanner) IsSeekable() bool {
- _, ok := s.r.r.(*trackableReader)
- return !ok
-}
-
// Seek sets a new offset from start, returns the old position before the change
func (s *Scanner) Seek(offset int64) (previous int64, err error) {
previous, err = s.r.Seek(0, io.SeekCurrent)
@@ -329,14 +334,14 @@ func (s *Scanner) readNegativeOffset() (int64, error) {
var c byte
var err error
- if c, err = s.r.ReadByte(); err != nil {
+ if c, err = s.readByte(); err != nil {
return 0, err
}
var offset = int64(c & maskLength)
for moreBytesInLength(c) {
offset++
- if c, err = s.r.ReadByte(); err != nil {
+ if c, err = s.readByte(); err != nil {
return 0, err
}
offset = (offset << lengthBits) + int64(c&maskLength)
@@ -345,6 +350,15 @@ func (s *Scanner) readNegativeOffset() (int64, error) {
return -offset, nil
}
+func (s *Scanner) readByte() (byte, error) {
+ b, err := s.r.ReadByte()
+ if err != nil {
+ return 0, err
+ }
+
+ return b, err
+}
+
func (s *Scanner) Close() error {
_, err := io.Copy(ioutil.Discard, s.r)
return err
@@ -368,8 +382,8 @@ func parseType(b byte) core.ObjectType {
}
type trackableReader struct {
- io.Reader
count int64
+ io.Reader
}
// Read reads up to len(p) bytes into p.
@@ -411,6 +425,39 @@ func (r *bufferedSeeker) Seek(offset int64, whence int) (int64, error) {
return current - int64(r.Buffered()), nil
}
- defer r.Reset(r.r)
+ defer r.Reader.Reset(r.r)
return r.r.Seek(offset, whence)
}
+
+type reader interface {
+ io.Reader
+ io.ByteReader
+ io.Seeker
+}
+
+type teeReader struct {
+ reader
+ w hash.Hash32
+}
+
+func (r *teeReader) Read(p []byte) (n int, err error) {
+ n, err = r.reader.Read(p)
+ if n > 0 {
+ if n, err := r.w.Write(p[:n]); err != nil {
+ return n, err
+ }
+ }
+ return
+}
+
+func (r *teeReader) ReadByte() (b byte, err error) {
+ b, err = r.reader.ReadByte()
+ if err == nil {
+ _, err := r.w.Write([]byte{b})
+ if err != nil {
+ return 0, err
+ }
+ }
+
+ return
+}
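
The teeReader introduced above mirrors every byte the Scanner reads into a CRC-32 hash, which is how NextObject can report the checksum of each object's packed representation. A standalone sketch of the same pattern, not code from this commit:

package main

import (
	"fmt"
	"hash"
	"hash/crc32"
	"io"
	"io/ioutil"
	"strings"
)

// crcReader wraps a reader and feeds everything read through it into a
// CRC-32 hash, mirroring the role of teeReader in the Scanner.
type crcReader struct {
	r   io.Reader
	crc hash.Hash32
}

func (c *crcReader) Read(p []byte) (int, error) {
	n, err := c.r.Read(p)
	if n > 0 {
		c.crc.Write(p[:n]) // hash.Hash writes never fail
	}
	return n, err
}

func main() {
	cr := &crcReader{r: strings.NewReader("packed object bytes"), crc: crc32.NewIEEE()}
	if _, err := io.Copy(ioutil.Discard, cr); err != nil {
		panic(err)
	}
	// The hash would be reset between objects, as the Scanner does in NextObjectHeader.
	fmt.Printf("crc32=%08x\n", cr.crc.Sum32())
}
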
diff --git a/formats/packfile/parser_test.go b/formats/packfile/parser_test.go
index 0c07daa..d746b98 100644
--- a/formats/packfile/parser_test.go
+++ b/formats/packfile/parser_test.go
@@ -2,502 +2,153 @@ package packfile
import (
"bytes"
- "encoding/base64"
- "io/ioutil"
. "gopkg.in/check.v1"
"gopkg.in/src-d/go-git.v4/core"
+ "gopkg.in/src-d/go-git.v4/fixtures"
)
type ScannerSuite struct{}
var _ = Suite(&ScannerSuite{})
+func (s *ScannerSuite) SetUpSuite(c *C) {
+ fixtures.RootFolder = "../../fixtures"
+}
+
func (s *ScannerSuite) TestHeader(c *C) {
- data, _ := base64.StdEncoding.DecodeString(packFileWithEmptyObjects)
+ r := fixtures.Basic().One().Packfile()
+ p := NewScanner(r)
- p := NewScanner(bytes.NewReader(data))
version, objects, err := p.Header()
c.Assert(err, IsNil)
c.Assert(version, Equals, VersionSupported)
- c.Assert(objects, Equals, uint32(11))
+ c.Assert(objects, Equals, uint32(31))
}
-func (s *ScannerSuite) TestNextObjectHeader(c *C) {
- data, _ := base64.StdEncoding.DecodeString(packFileWithEmptyObjects)
+func (s *ScannerSuite) TestNextObjectHeaderREFDelta(c *C) {
+ s.testNextObjectHeader(c, "ref-delta", expectedHeadersREF)
+}
- r := bytes.NewReader(data)
+func (s *ScannerSuite) TestNextObjectHeaderOFSDelta(c *C) {
+ s.testNextObjectHeader(c, "ofs-delta", expectedHeadersOFS)
+}
+
+func (s *ScannerSuite) testNextObjectHeader(c *C, tag string, expected []ObjectHeader) {
+ r := fixtures.Basic().ByTag(tag).Packfile()
p := NewScanner(r)
+
_, objects, err := p.Header()
c.Assert(err, IsNil)
for i := 0; i < int(objects); i++ {
h, err := p.NextObjectHeader()
c.Assert(err, IsNil)
- c.Assert(*h, DeepEquals, expectedHeaders[i])
+ c.Assert(*h, DeepEquals, expected[i])
buf := bytes.NewBuffer(nil)
- n, err := p.NextObject(buf)
+ n, _, err := p.NextObject(buf)
c.Assert(err, IsNil)
c.Assert(n, Equals, h.Length)
}
- n, err := ioutil.ReadAll(r)
+ n, err := p.Checksum()
c.Assert(err, IsNil)
c.Assert(n, HasLen, 20)
}
func (s *ScannerSuite) TestNextObjectHeaderWithOutReadObject(c *C) {
- data, _ := base64.StdEncoding.DecodeString(packFileWithEmptyObjects)
-
- r := bytes.NewReader(data)
+ f := fixtures.Basic().ByTag("ref-delta")
+ r := f.Packfile()
p := NewScanner(r)
+
_, objects, err := p.Header()
c.Assert(err, IsNil)
for i := 0; i < int(objects); i++ {
- h, err := p.NextObjectHeader()
+ h, _ := p.NextObjectHeader()
c.Assert(err, IsNil)
- c.Assert(*h, DeepEquals, expectedHeaders[i])
+ c.Assert(*h, DeepEquals, expectedHeadersREF[i])
}
err = p.discardObjectIfNeeded()
c.Assert(err, IsNil)
- n, err := ioutil.ReadAll(r)
- c.Assert(err, IsNil)
- c.Assert(n, HasLen, 20)
-}
-
-var expectedHeaders = []ObjectHeader{
- {Type: core.CommitObject, Offset: 12, Length: 239},
- {Type: core.CommitObject, Offset: 177, Length: 244},
- {Type: core.CommitObject, Offset: 345, Length: 239},
- {Type: core.CommitObject, Offset: 507, Length: 191},
- {Type: core.TreeObject, Offset: 639, Length: 91},
- {Type: core.BlobObject, Offset: 714, Length: 0},
- {Type: core.BlobObject, Offset: 723, Length: 14},
- {Type: core.OFSDeltaObject, Offset: 740, Length: 4, OffsetReference: 639},
- {Type: core.TreeObject, Offset: 754, Length: 58},
- {Type: core.BlobObject, Offset: 820, Length: 7},
- {Type: core.TreeObject, Offset: 833, Length: 29},
-}
-
-/*
-const (
- sigOffset = 0
- verOffset = 4
- countOffset = 8
-)
-
-type ParserSuite struct {
- fixtures map[string]*fix
-}
-
-type fix struct {
- path string
- parser *Parser
- seekable io.Seeker
-}
-
-func newFix(path string) (*fix, error) {
- fix := new(fix)
- fix.path = path
-
- f, err := os.Open(path)
- if err != nil {
- return nil, err
- }
-
- data, err := ioutil.ReadAll(f)
- if err != nil {
- return nil, err
- }
-
- if err = f.Close(); err != nil {
- return nil, err
- }
-
- seekable := NewSeekable(bytes.NewReader(data))
- fix.seekable = seekable
- fix.parser = NewParser(seekable)
-
- return fix, nil
-}
-
-func (f *fix) seek(o int64) error {
- _, err := f.seekable.Seek(o, os.SEEK_SET)
- return err
-}
-
-var _ = Suite(&ParserSuite{})
-
-func (s *ParserSuite) SetUpSuite(c *C) {
- s.fixtures = make(map[string]*fix)
- for _, fixData := range []struct {
- id string
- path string
- }{
- {"ofs-deltas", "fixtures/alcortesm-binary-relations.pack"},
- {"ref-deltas", "fixtures/git-fixture.ref-delta"},
- } {
- fix, err := newFix(fixData.path)
- c.Assert(err, IsNil,
- Commentf("setting up fixture id %s: %s", fixData.id, err))
-
- _, ok := s.fixtures[fixData.id]
- c.Assert(ok, Equals, false,
- Commentf("duplicated fixture id: %s", fixData.id))
-
- s.fixtures[fixData.id] = fix
- }
-}
-
-func (s *ParserSuite) TestSignature(c *C) {
- for id, fix := range s.fixtures {
- com := Commentf("fixture id = %s", id)
- err := fix.seek(sigOffset)
- c.Assert(err, IsNil, com)
- p := fix.parser
-
- sig, err := p.ReadSignature()
- c.Assert(err, IsNil, com)
- c.Assert(p.IsValidSignature(sig), Equals, true, com)
- }
-}
-
-func (s *ParserSuite) TestVersion(c *C) {
- for i, test := range [...]struct {
- fixID string
- expected uint32
- }{
- {
- fixID: "ofs-deltas",
- expected: uint32(2),
- }, {
- fixID: "ref-deltas",
- expected: uint32(2),
- },
- } {
- com := Commentf("test %d) fixture id = %s", i, test.fixID)
- fix, ok := s.fixtures[test.fixID]
- c.Assert(ok, Equals, true, com)
-
- err := fix.seek(verOffset)
- c.Assert(err, IsNil, com)
- p := fix.parser
-
- v, err := p.ReadVersion()
- c.Assert(err, IsNil, com)
- c.Assert(v, Equals, test.expected, com)
- c.Assert(p.IsSupportedVersion(v), Equals, true, com)
- }
-}
-
-func (s *ParserSuite) TestCount(c *C) {
- for i, test := range [...]struct {
- fixID string
- expected uint32
- }{
- {
- fixID: "ofs-deltas",
- expected: uint32(0x50),
- }, {
- fixID: "ref-deltas",
- expected: uint32(0x1c),
- },
- } {
- com := Commentf("test %d) fixture id = %s", i, test.fixID)
- fix, ok := s.fixtures[test.fixID]
- c.Assert(ok, Equals, true, com)
-
- err := fix.seek(countOffset)
- c.Assert(err, IsNil, com)
- p := fix.parser
-
- count, err := p.ReadCount()
- c.Assert(err, IsNil, com)
- c.Assert(count, Equals, test.expected, com)
- }
-}
-
-func (s *ParserSuite) TestReadObjectTypeAndLength(c *C) {
- for i, test := range [...]struct {
- fixID string
- offset int64
- expType core.ObjectType
- expLength int64
- }{
- {
- fixID: "ofs-deltas",
- offset: 12,
- expType: core.CommitObject,
- expLength: 342,
- }, {
- fixID: "ofs-deltas",
- offset: 1212,
- expType: core.OFSDeltaObject,
- expLength: 104,
- }, {
- fixID: "ofs-deltas",
- offset: 3193,
- expType: core.TreeObject,
- expLength: 226,
- }, {
- fixID: "ofs-deltas",
- offset: 3639,
- expType: core.BlobObject,
- expLength: 90,
- }, {
- fixID: "ofs-deltas",
- offset: 4504,
- expType: core.BlobObject,
- expLength: 7107,
- }, {
- fixID: "ref-deltas",
- offset: 84849,
- expType: core.REFDeltaObject,
- expLength: 6,
- }, {
- fixID: "ref-deltas",
- offset: 85070,
- expType: core.REFDeltaObject,
- expLength: 8,
- },
- } {
- com := Commentf("test %d) fixture id = %s", i, test.fixID)
- fix, ok := s.fixtures[test.fixID]
- c.Assert(ok, Equals, true, com)
-
- err := fix.seek(test.offset)
- c.Assert(err, IsNil, com)
- p := fix.parser
-
- typ, length, err := p.ReadObjectTypeAndLength()
- c.Assert(err, IsNil, com)
- c.Assert(typ, Equals, test.expType, com)
- c.Assert(length, Equals, test.expLength, com)
- }
-}
-
-func (s *ParserSuite) TestReadNonDeltaObjectContent(c *C) {
- for i, test := range [...]struct {
- fixID string
- offset int64
- expected []byte
- }{
- {
- fixID: "ofs-deltas",
- offset: 12,
- expected: []byte("tree 87c87d16e815a43e4e574dd8edd72c5450ac3a8e\nparent a87d72684d1cf68099ce6e9f68689e25e645a14c\nauthor Gorka Guardiola <Gorka Guardiola Múzquiz> 1450265632 +0100\ncommitter Gorka Guardiola <Gorka Guardiola Múzquiz> 1450265632 +0100\n\nChanged example to use dot.\nI did not remove the original files outside of the\ntex, I leave that to alcortes.\n"),
- }, {
- fixID: "ofs-deltas",
- offset: 1610,
- expected: []byte("tree 4b4f0d9a07109ef0b8a3051138cc20cdb47fa513\nparent b373f85fa2594d7dcd9989f4a5858a81647fb8ea\nauthor Alberto Cortés <alberto@sourced.tech> 1448017995 +0100\ncommitter Alberto Cortés <alberto@sourced.tech> 1448018112 +0100\n\nMove generated images to it own dir (img/)\n\nFixes #1.\n"),
- }, {
- fixID: "ofs-deltas",
- offset: 10566,
- expected: []byte("40000 map-slice\x00\x00\xce\xfb\x8ew\xf7\xa8\xc6\x1b\x99\xdd$\x91\xffH\xa3\xb0\xb1fy40000 simple-arrays\x00\x9a7\x81\xb7\xfd\x9d(Q\xe2\xa4H\x8c\x03^٬\x90Z\xecy"),
- },
- } {
- com := Commentf("test %d) fixture id = %s", i, test.fixID)
- fix, ok := s.fixtures[test.fixID]
- c.Assert(ok, Equals, true, com)
-
- err := fix.seek(test.offset)
- c.Assert(err, IsNil, com)
- p := fix.parser
-
- _, _, err = p.ReadObjectTypeAndLength()
- c.Assert(err, IsNil, com)
-
- obj := &core.MemoryObject{}
- err = p.FillFromNonDeltaContent(obj)
- c.Assert(err, IsNil, com)
-
- r, _ := obj.Reader()
- bytes, _ := ioutil.ReadAll(r)
- c.Assert(bytes, DeepEquals, test.expected, com)
- }
-}
-
-func (s *ParserSuite) TestFillOFSDeltaObjectContent(c *C) {
- for i, test := range [...]struct {
- fixID string
- offset int64
- expOffset int64
- expType core.ObjectType
- expContent []byte
- }{
- {
- fixID: "ofs-deltas",
- offset: 1212,
- expOffset: -212,
- expType: core.CommitObject,
- expContent: []byte("tree c4573589ce78ac63769c20742b9a970f6e274a38\nparent 4571a24948494ebe1cb3dc18ca5a9286e79705ae\nauthor Alberto Cortés <alberto@sourced.tech> 1448139640 +0100\ncommitter Alberto Cortés <alberto@sourced.tech> 1448139640 +0100\n\nUpdate reference to binrels module\n"),
- }, {
- fixID: "ofs-deltas",
- offset: 3514,
- expOffset: -102,
- expType: core.TreeObject,
- expContent: []byte("100644 .gitignore\x00\u007fA\x90[Mw\xabJ\x9a-3O\xcd\x0f\xb5\xdbn\x8e!\x83100644 .gitmodules\x00\xd4`\xa8>\x15\xcfd\x05\x81B7_\xc4\v\x04\xa7\xa9A\x85\n100644 Makefile\x00-ҭ\x8c\x14\xdef\x12\xed\x15\x816y\xa6UK\xad\x993\v100644 binary-relations.tex\x00\x802\x05@\x11'^ \xf5<\xf7\xfd\x81%3\xd1o\xa9_$40000 graphs\x00\xdehu\x16\xc6\x0e\\H\x8e\xe9\xa1JIXE\xbaڽg\xc540000 imgs-gen\x00\xeb\"\xddhzg\xa3\x1f\xc8j\xc5\xfc豢\xe9\x96\xce\xce^40000 src\x00\x895\x11t\xff\x86\xa7\xea\xa6\xc0v%\x11E\x10f,ݒ\x1a"),
- }, {
- fixID: "ofs-deltas",
- offset: 9806,
- expOffset: -6613,
- expType: core.TreeObject,
- expContent: []byte("100644 .gitignore\x00\u007fA\x90[Mw\xabJ\x9a-3O\xcd\x0f\xb5\xdbn\x8e!\x83100644 .gitmodules\x00\xd4`\xa8>\x15\xcfd\x05\x81B7_\xc4\v\x04\xa7\xa9A\x85\n100644 Makefile\x00-ҭ\x8c\x14\xdef\x12\xed\x15\x816y\xa6UK\xad\x993\v100644 binary-relations.tex\x00I\x13~\xb8کEU\x9f\x99#\xc4E.\x9d>\uef1e\xad40000 graphs\x00\xb9\x00\xf34\xde\xff\xce@+\xbd\xf8 9\xb8=\xc1\xb9\x00\x84]40000 imgs-gen\x00\xeb\"\xddhzg\xa3\x1f\xc8j\xc5\xfc豢\xe9\x96\xce\xce^40000 src\x00\x895\x11t\xff\x86\xa7\xea\xa6\xc0v%\x11E\x10f,ݒ\x1a"),
- },
- } {
- com := Commentf("test %d) fixture id = %s", i, test.fixID)
- fix, ok := s.fixtures[test.fixID]
- c.Assert(ok, Equals, true, com)
-
- err := fix.seek(test.offset)
- c.Assert(err, IsNil, com)
- p := fix.parser
-
- _, _, err = p.ReadObjectTypeAndLength()
- c.Assert(err, IsNil, com)
-
- beforeJumpSize, err := p.Offset()
- c.Assert(err, IsNil, com)
-
- jump, err := p.ReadNegativeOffset()
- c.Assert(err, IsNil, com)
- c.Assert(jump, Equals, test.expOffset, com)
-
- err = fix.seek(beforeJumpSize)
- c.Assert(err, IsNil, com)
-
- obj := &core.MemoryObject{}
- err = p.FillOFSDeltaObjectContent(obj, test.offset)
- c.Assert(err, IsNil, com)
- c.Assert(obj.Type(), Equals, test.expType, com)
-
- r, _ := obj.Reader()
- bytes, _ := ioutil.ReadAll(r)
- c.Assert(bytes, DeepEquals, test.expContent, com)
- }
-}
-
-func (s *ParserSuite) TestFillREFDeltaObjectContent(c *C) {
- for i, test := range [...]struct {
- fixID string
- offset int64
- deps map[int64]core.Object
- expHash core.Hash
- expType core.ObjectType
- expContent []byte
- }{
- {
- fixID: "ref-deltas",
- offset: 84849,
- deps: map[int64]core.Object{
- 83607: newObject(core.TreeObject, []byte("100644 .gitignore\x002\x85\x8a\xad<8>\xd1\xff\n\x0f\x9b\xdf#\x1dT\xa0\f\x9e\x88100644 CHANGELOG\x00\xd3\xffS\xe0VJ\x9f\x87\xd8\xe8Kn(\xe5\x06\x0eQp\b\xaa100644 LICENSE\x00\xc1\x92\xbdj$\xea\x1a\xb0\x1dxhnA|\x8b\xdc|=\x19\u007f100644 binary.jpg\x00\xd5\xc0\xf4\xab\x81\x18\x97\xca\xdf\x03\xae\xc3X\xae`\xd2\x1f\x91\xc5\r40000 go\x00\xa3\x97q\xa7e\x1f\x97\xfa\xf5\xc7.\b\"M\x85\u007f\xc3Q3\xdb40000 json\x00Z\x87~j\x90j'C\xadnEٜ\x17\x93d*\xaf\x8e\xda40000 php\x00Xj\xf5gл^w\x1eI\xbd\xd9CO^\x0f\xb7m%\xfa40000 vendor\x00\xcfJ\xa3\xb3\x89t\xfb}\x81\xf3g\xc0\x83\x0f}x\xd6Z\xb8k")),
- },
- expHash: core.NewHash("a8d315b2b1c615d43042c3a62402b8a54288cf5c"),
- expType: core.TreeObject,
- expContent: []byte("100644 .gitignore\x002\x85\x8a\xad<8>\xd1\xff\n\x0f\x9b\xdf#\x1dT\xa0\f\x9e\x88100644 CHANGELOG\x00\xd3\xffS\xe0VJ\x9f\x87\xd8\xe8Kn(\xe5\x06\x0eQp\b\xaa100644 LICENSE\x00\xc1\x92\xbdj$\xea\x1a\xb0\x1dxhnA|\x8b\xdc|=\x19\u007f100644 binary.jpg\x00\xd5\xc0\xf4\xab\x81\x18\x97\xca\xdf\x03\xae\xc3X\xae`\xd2\x1f\x91\xc5\r40000 go\x00\xa3\x97q\xa7e\x1f\x97\xfa\xf5\xc7.\b\"M\x85\u007f\xc3Q3\xdb40000 json\x00Z\x87~j\x90j'C\xadnEٜ\x17\x93d*\xaf\x8e\xda40000 php\x00Xj\xf5gл^w\x1eI\xbd\xd9CO^\x0f\xb7m%\xfa"),
- }, {
- fixID: "ref-deltas",
- offset: 85070,
- deps: map[int64]core.Object{
- 84922: newObject(core.TreeObject, []byte("100644 .gitignore\x002\x85\x8a\xad<8>\xd1\xff\n\x0f\x9b\xdf#\x1dT\xa0\f\x9e\x88100644 CHANGELOG\x00\xd3\xffS\xe0VJ\x9f\x87\xd8\xe8Kn(\xe5\x06\x0eQp\b\xaa100644 LICENSE\x00\xc1\x92\xbdj$\xea\x1a\xb0\x1dxhnA|\x8b\xdc|=\x19\u007f100644 binary.jpg\x00\xd5\xc0\xf4\xab\x81\x18\x97\xca\xdf\x03\xae\xc3X\xae`\xd2\x1f\x91\xc5\r")),
- 84849: newObject(core.TreeObject, []byte("100644 .gitignore\x002\x85\x8a\xad<8>\xd1\xff\n\x0f\x9b\xdf#\x1dT\xa0\f\x9e\x88100644 CHANGELOG\x00\xd3\xffS\xe0VJ\x9f\x87\xd8\xe8Kn(\xe5\x06\x0eQp\b\xaa100644 LICENSE\x00\xc1\x92\xbdj$\xea\x1a\xb0\x1dxhnA|\x8b\xdc|=\x19\u007f100644 binary.jpg\x00\xd5\xc0\xf4\xab\x81\x18\x97\xca\xdf\x03\xae\xc3X\xae`\xd2\x1f\x91\xc5\r40000 go\x00\xa3\x97q\xa7e\x1f\x97\xfa\xf5\xc7.\b\"M\x85\u007f\xc3Q3\xdb40000 json\x00Z\x87~j\x90j'C\xadnEٜ\x17\x93d*\xaf\x8e\xda40000 php\x00Xj\xf5gл^w\x1eI\xbd\xd9CO^\x0f\xb7m%\xfa")),
- 83607: newObject(core.TreeObject, []byte("100644 .gitignore\x002\x85\x8a\xad<8>\xd1\xff\n\x0f\x9b\xdf#\x1dT\xa0\f\x9e\x88100644 CHANGELOG\x00\xd3\xffS\xe0VJ\x9f\x87\xd8\xe8Kn(\xe5\x06\x0eQp\b\xaa100644 LICENSE\x00\xc1\x92\xbdj$\xea\x1a\xb0\x1dxhnA|\x8b\xdc|=\x19\u007f100644 binary.jpg\x00\xd5\xc0\xf4\xab\x81\x18\x97\xca\xdf\x03\xae\xc3X\xae`\xd2\x1f\x91\xc5\r40000 go\x00\xa3\x97q\xa7e\x1f\x97\xfa\xf5\xc7.\b\"M\x85\u007f\xc3Q3\xdb40000 json\x00Z\x87~j\x90j'C\xadnEٜ\x17\x93d*\xaf\x8e\xda40000 php\x00Xj\xf5gл^w\x1eI\xbd\xd9CO^\x0f\xb7m%\xfa40000 vendor\x00\xcfJ\xa3\xb3\x89t\xfb}\x81\xf3g\xc0\x83\x0f}x\xd6Z\xb8k")),
- },
- expHash: core.NewHash("eba74343e2f15d62adedfd8c883ee0262b5c8021"),
- expType: core.TreeObject,
- expContent: []byte("100644 .gitignore\x002\x85\x8a\xad<8>\xd1\xff\n\x0f\x9b\xdf#\x1dT\xa0\f\x9e\x88100644 LICENSE\x00\xc1\x92\xbdj$\xea\x1a\xb0\x1dxhnA|\x8b\xdc|=\x19\u007f100644 binary.jpg\x00\xd5\xc0\xf4\xab\x81\x18\x97\xca\xdf\x03\xae\xc3X\xae`\xd2\x1f\x91\xc5\r"),
- },
- } {
- com := Commentf("test %d) fixture id = %s", i, test.fixID)
- fix, ok := s.fixtures[test.fixID]
- c.Assert(ok, Equals, true, com)
-
- err := fix.seek(test.offset)
- c.Assert(err, IsNil, com)
- p := fix.parser
- for k, v := range test.deps {
- err = p.Remember(k, v)
- c.Assert(err, IsNil, com)
- }
-
- _, _, err = p.ReadObjectTypeAndLength()
- c.Assert(err, IsNil, com)
-
- beforeHash, err := p.Offset()
- c.Assert(err, IsNil, com)
-
- hash, err := p.ReadHash()
- c.Assert(err, IsNil, com)
- c.Assert(hash, Equals, test.expHash, com)
-
- err = fix.seek(beforeHash)
- c.Assert(err, IsNil, com)
-
- obj := &core.MemoryObject{}
- err = p.FillREFDeltaObjectContent(obj)
- c.Assert(err, IsNil, com)
- c.Assert(obj.Type(), Equals, test.expType, com)
-
- r, _ := obj.Reader()
- bytes, _ := ioutil.ReadAll(r)
- c.Assert(bytes, DeepEquals, test.expContent, com)
-
- p.ForgetAll()
- }
-}
-
-func newObject(t core.ObjectType, c []byte) core.Object {
- o := &core.MemoryObject{}
- o.SetType(t)
- o.SetSize(int64(len(c)))
- o.Write(c)
-
- return o
-}
-
-func (s *ParserSuite) TestReadHeaderBadSignatureError(c *C) {
- data := []byte{
- 0x50, 0x42, 0x43, 0x4b, 0x00, 0x00, 0x00, 0x02,
- 0x00, 0x00, 0x00, 0x50,
- }
- p := NewParser(NewSeekable(bytes.NewReader(data)))
-
- _, err := p.ReadHeader()
- c.Assert(err, ErrorMatches, ErrBadSignature.Error())
-}
-
-func (s *ParserSuite) TestReadHeaderEmptyPackfileError(c *C) {
- data := []byte{}
- p := NewParser(NewSeekable(bytes.NewReader(data)))
-
- _, err := p.ReadHeader()
- c.Assert(err, ErrorMatches, ErrEmptyPackfile.Error())
-}
-
-func (s *ParserSuite) TestReadHeaderUnsupportedVersionError(c *C) {
- data := []byte{
- 0x50, 0x41, 0x43, 0x4b, 0x00, 0x00, 0x00, 0x01,
- 0x00, 0x00, 0x00, 0x50,
- }
- p := NewParser(NewSeekable(bytes.NewReader(data)))
-
- _, err := p.ReadHeader()
- c.Assert(err, ErrorMatches, ErrUnsupportedVersion.Error()+".*")
-}
-
-func (s *ParserSuite) TestReadHeader(c *C) {
- data := []byte{
- 0x50, 0x41, 0x43, 0x4b, 0x00, 0x00, 0x00, 0x02,
- 0x00, 0x00, 0x00, 0x50,
- }
- p := NewParser(NewSeekable(bytes.NewReader(data)))
-
- count, err := p.ReadHeader()
+ n, err := p.Checksum()
c.Assert(err, IsNil)
- c.Assert(count, Equals, uint32(0x50))
+ c.Assert(n, Equals, f.PackfileHash)
+}
+
+var expectedHeadersOFS = []ObjectHeader{
+ {Type: core.CommitObject, Offset: 12, Length: 254},
+ {Type: core.OFSDeltaObject, Offset: 186, Length: 93, OffsetReference: 12},
+ {Type: core.CommitObject, Offset: 286, Length: 242},
+ {Type: core.CommitObject, Offset: 449, Length: 242},
+ {Type: core.CommitObject, Offset: 615, Length: 333},
+ {Type: core.CommitObject, Offset: 838, Length: 332},
+ {Type: core.CommitObject, Offset: 1063, Length: 244},
+ {Type: core.CommitObject, Offset: 1230, Length: 243},
+ {Type: core.CommitObject, Offset: 1392, Length: 187},
+ {Type: core.BlobObject, Offset: 1524, Length: 189},
+ {Type: core.BlobObject, Offset: 1685, Length: 18},
+ {Type: core.BlobObject, Offset: 1713, Length: 1072},
+ {Type: core.BlobObject, Offset: 2351, Length: 76110},
+ {Type: core.BlobObject, Offset: 78050, Length: 2780},
+ {Type: core.BlobObject, Offset: 78882, Length: 217848},
+ {Type: core.BlobObject, Offset: 80725, Length: 706},
+ {Type: core.BlobObject, Offset: 80998, Length: 11488},
+ {Type: core.BlobObject, Offset: 84032, Length: 78},
+ {Type: core.TreeObject, Offset: 84115, Length: 272},
+ {Type: core.OFSDeltaObject, Offset: 84375, Length: 43, OffsetReference: 84115},
+ {Type: core.TreeObject, Offset: 84430, Length: 38},
+ {Type: core.TreeObject, Offset: 84479, Length: 75},
+ {Type: core.TreeObject, Offset: 84559, Length: 38},
+ {Type: core.TreeObject, Offset: 84608, Length: 34},
+ {Type: core.BlobObject, Offset: 84653, Length: 9},
+ {Type: core.OFSDeltaObject, Offset: 84671, Length: 6, OffsetReference: 84375},
+ {Type: core.OFSDeltaObject, Offset: 84688, Length: 9, OffsetReference: 84375},
+ {Type: core.OFSDeltaObject, Offset: 84708, Length: 6, OffsetReference: 84375},
+ {Type: core.OFSDeltaObject, Offset: 84725, Length: 5, OffsetReference: 84115},
+ {Type: core.OFSDeltaObject, Offset: 84741, Length: 8, OffsetReference: 84375},
+ {Type: core.OFSDeltaObject, Offset: 84760, Length: 4, OffsetReference: 84741},
+}
+
+var expectedHeadersREF = []ObjectHeader{
+ {Type: core.CommitObject, Offset: 12, Length: 254},
+ {Type: core.REFDeltaObject, Offset: 186, Length: 93,
+ Reference: core.NewHash("e8d3ffab552895c19b9fcf7aa264d277cde33881")},
+ {Type: core.CommitObject, Offset: 304, Length: 242},
+ {Type: core.CommitObject, Offset: 467, Length: 242},
+ {Type: core.CommitObject, Offset: 633, Length: 333},
+ {Type: core.CommitObject, Offset: 856, Length: 332},
+ {Type: core.CommitObject, Offset: 1081, Length: 243},
+ {Type: core.CommitObject, Offset: 1243, Length: 244},
+ {Type: core.CommitObject, Offset: 1410, Length: 187},
+ {Type: core.BlobObject, Offset: 1542, Length: 189},
+ {Type: core.BlobObject, Offset: 1703, Length: 18},
+ {Type: core.BlobObject, Offset: 1731, Length: 1072},
+ {Type: core.BlobObject, Offset: 2369, Length: 76110},
+ {Type: core.TreeObject, Offset: 78068, Length: 38},
+ {Type: core.BlobObject, Offset: 78117, Length: 2780},
+ {Type: core.TreeObject, Offset: 79049, Length: 75},
+ {Type: core.BlobObject, Offset: 79129, Length: 217848},
+ {Type: core.BlobObject, Offset: 80972, Length: 706},
+ {Type: core.TreeObject, Offset: 81265, Length: 38},
+ {Type: core.BlobObject, Offset: 81314, Length: 11488},
+ {Type: core.TreeObject, Offset: 84752, Length: 34},
+ {Type: core.BlobObject, Offset: 84797, Length: 78},
+ {Type: core.TreeObject, Offset: 84880, Length: 271},
+ {Type: core.REFDeltaObject, Offset: 85141, Length: 6,
+ Reference: core.NewHash("a8d315b2b1c615d43042c3a62402b8a54288cf5c")},
+ {Type: core.REFDeltaObject, Offset: 85176, Length: 37,
+ Reference: core.NewHash("fb72698cab7617ac416264415f13224dfd7a165e")},
+ {Type: core.BlobObject, Offset: 85244, Length: 9},
+ {Type: core.REFDeltaObject, Offset: 85262, Length: 9,
+ Reference: core.NewHash("fb72698cab7617ac416264415f13224dfd7a165e")},
+ {Type: core.REFDeltaObject, Offset: 85300, Length: 6,
+ Reference: core.NewHash("fb72698cab7617ac416264415f13224dfd7a165e")},
+ {Type: core.TreeObject, Offset: 85335, Length: 110},
+ {Type: core.REFDeltaObject, Offset: 85448, Length: 8,
+ Reference: core.NewHash("eba74343e2f15d62adedfd8c883ee0262b5c8021")},
+ {Type: core.TreeObject, Offset: 85485, Length: 73},
}
-*/