aboutsummaryrefslogtreecommitdiffstats
path: root/formats/packp/advrefs
diff options
context:
space:
mode:
Diffstat (limited to 'formats/packp/advrefs')
-rw-r--r--formats/packp/advrefs/advrefs.go58
-rw-r--r--formats/packp/advrefs/advrefs_test.go315
-rw-r--r--formats/packp/advrefs/decoder.go288
-rw-r--r--formats/packp/advrefs/decoder_test.go500
-rw-r--r--formats/packp/advrefs/encoder.go155
-rw-r--r--formats/packp/advrefs/encoder_test.go249
6 files changed, 0 insertions, 1565 deletions
diff --git a/formats/packp/advrefs/advrefs.go b/formats/packp/advrefs/advrefs.go
deleted file mode 100644
index ab4bcf5..0000000
--- a/formats/packp/advrefs/advrefs.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Package advrefs implements encoding and decoding advertised-refs
-// messages from a git-upload-pack command.
-package advrefs
-
-import (
- "gopkg.in/src-d/go-git.v4/core"
- "gopkg.in/src-d/go-git.v4/formats/packp"
-)
-
-const (
- hashSize = 40
- head = "HEAD"
- noHead = "capabilities^{}"
-)
-
-var (
- sp = []byte(" ")
- null = []byte("\x00")
- eol = []byte("\n")
- peeled = []byte("^{}")
- shallow = []byte("shallow ")
- noHeadMark = []byte(" capabilities^{}\x00")
-)
-
-// AdvRefs values represent the information transmitted on an
-// advertised-refs message. Values from this type are not zero-value
-// safe, use the New function instead.
-//
-// When using this messages over (smart) HTTP, you have to add a pktline
-// before the whole thing with the following payload:
-//
-// '# service=$servicename" LF
-//
-// Moreover, some (all) git HTTP smart servers will send a flush-pkt
-// just after the first pkt-line.
-//
-// To accomodate both situations, the Prefix field allow you to store
-// any data you want to send before the actual pktlines. It will also
-// be filled up with whatever is found on the line.
-type AdvRefs struct {
- Prefix [][]byte // payloads of the prefix
- Head *core.Hash
- Capabilities *packp.Capabilities
- References map[string]core.Hash
- Peeled map[string]core.Hash
- Shallows []core.Hash
-}
-
-// New returns a pointer to a new AdvRefs value, ready to be used.
-func New() *AdvRefs {
- return &AdvRefs{
- Prefix: [][]byte{},
- Capabilities: packp.NewCapabilities(),
- References: make(map[string]core.Hash),
- Peeled: make(map[string]core.Hash),
- Shallows: []core.Hash{},
- }
-}
diff --git a/formats/packp/advrefs/advrefs_test.go b/formats/packp/advrefs/advrefs_test.go
deleted file mode 100644
index 6950ba5..0000000
--- a/formats/packp/advrefs/advrefs_test.go
+++ /dev/null
@@ -1,315 +0,0 @@
-package advrefs_test
-
-import (
- "bytes"
- "fmt"
- "io"
- "strings"
- "testing"
-
- "gopkg.in/src-d/go-git.v4/core"
- "gopkg.in/src-d/go-git.v4/formats/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/formats/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
-func Test(t *testing.T) { TestingT(t) }
-
-type SuiteDecodeEncode struct{}
-
-var _ = Suite(&SuiteDecodeEncode{})
-
-func (s *SuiteDecodeEncode) test(c *C, in []string, exp []string) {
- var err error
- var input io.Reader
- {
- var buf bytes.Buffer
- p := pktline.NewEncoder(&buf)
- err = p.EncodeString(in...)
- c.Assert(err, IsNil)
- input = &buf
- }
-
- var expected []byte
- {
- var buf bytes.Buffer
- p := pktline.NewEncoder(&buf)
- err = p.EncodeString(exp...)
- c.Assert(err, IsNil)
-
- expected = buf.Bytes()
- }
-
- var obtained []byte
- {
- ar := advrefs.New()
- d := advrefs.NewDecoder(input)
- err = d.Decode(ar)
- c.Assert(err, IsNil)
-
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
- err := e.Encode(ar)
- c.Assert(err, IsNil)
-
- obtained = buf.Bytes()
- }
-
- c.Assert(obtained, DeepEquals, expected,
- Commentf("input = %v\nobtained = %q\nexpected = %q\n",
- in, string(obtained), string(expected)))
-}
-
-func (s *SuiteDecodeEncode) TestNoHead(c *C) {
- input := []string{
- "0000000000000000000000000000000000000000 capabilities^{}\x00",
- pktline.FlushString,
- }
-
- expected := []string{
- "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestNoHeadSmart(c *C) {
- input := []string{
- "# service=git-upload-pack\n",
- "0000000000000000000000000000000000000000 capabilities^{}\x00",
- pktline.FlushString,
- }
-
- expected := []string{
- "# service=git-upload-pack\n",
- "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestNoHeadSmartBug(c *C) {
- input := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestRefs(c *C) {
- input := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree",
- pktline.FlushString,
- }
-
- expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestPeeled(c *C) {
- input := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestAll(c *C) {
- input := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}",
- "shallow 1111111111111111111111111111111111111111",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestAllSmart(c *C) {
- input := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestAllSmartBug(c *C) {
- input := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func ExampleDecoder_Decode() {
- // Here is a raw advertised-ref message.
- raw := "" +
- "0065a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n" +
- "003fa6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n" +
- "00441111111111111111111111111111111111111111 refs/tags/v2.6.11-tree\n" +
- "00475555555555555555555555555555555555555555 refs/tags/v2.6.11-tree^{}\n" +
- "0035shallow 5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c\n" +
- "0000"
-
- // Use the raw message as our input.
- input := strings.NewReader(raw)
-
- // Create a advref.Decoder reading from our input.
- d := advrefs.NewDecoder(input)
-
- // Decode the input into a newly allocated AdvRefs value.
- ar := advrefs.New()
- _ = d.Decode(ar) // error check ignored for brevity
-
- // Do something interesting with the AdvRefs, e.g. print its contents.
- fmt.Println("head =", ar.Head)
- fmt.Println("capabilities =", ar.Capabilities.String())
- fmt.Println("...")
- fmt.Println("shallows =", ar.Shallows)
- // Output: head = a6930aaee06755d1bdcfd943fbf614e4d92bb0c7
- // capabilities = multi_ack ofs-delta symref=HEAD:/refs/heads/master
- // ...
- // shallows = [5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c]
-}
-
-func ExampleEncoder_Encode() {
- // Create an AdvRefs with the contents you want...
- ar := advrefs.New()
-
- // ...add a hash for the HEAD...
- head := core.NewHash("1111111111111111111111111111111111111111")
- ar.Head = &head
-
- // ...add some server capabilities...
- ar.Capabilities.Add("symref", "HEAD:/refs/heads/master")
- ar.Capabilities.Add("ofs-delta")
- ar.Capabilities.Add("multi_ack")
-
- // ...add a couple of references...
- ar.References["refs/heads/master"] = core.NewHash("2222222222222222222222222222222222222222")
- ar.References["refs/tags/v1"] = core.NewHash("3333333333333333333333333333333333333333")
-
- // ...including a peeled ref...
- ar.Peeled["refs/tags/v1"] = core.NewHash("4444444444444444444444444444444444444444")
-
- // ...and finally add a shallow
- ar.Shallows = append(ar.Shallows, core.NewHash("5555555555555555555555555555555555555555"))
-
- // Encode the advrefs.Contents to a bytes.Buffer.
- // You can encode into stdout too, but you will not be able
- // see the '\x00' after "HEAD".
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
- _ = e.Encode(ar) // error checks ignored for brevity
-
- // Print the contents of the buffer as a quoted string.
- // Printing is as a non-quoted string will be prettier but you
- // will miss the '\x00' after "HEAD".
- fmt.Printf("%q", buf.String())
- // Output:
- // "00651111111111111111111111111111111111111111 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n003f2222222222222222222222222222222222222222 refs/heads/master\n003a3333333333333333333333333333333333333333 refs/tags/v1\n003d4444444444444444444444444444444444444444 refs/tags/v1^{}\n0035shallow 5555555555555555555555555555555555555555\n0000"
-}
diff --git a/formats/packp/advrefs/decoder.go b/formats/packp/advrefs/decoder.go
deleted file mode 100644
index a0cf5e6..0000000
--- a/formats/packp/advrefs/decoder.go
+++ /dev/null
@@ -1,288 +0,0 @@
-package advrefs
-
-import (
- "bytes"
- "encoding/hex"
- "errors"
- "fmt"
- "io"
-
- "gopkg.in/src-d/go-git.v4/core"
- "gopkg.in/src-d/go-git.v4/formats/packp/pktline"
-)
-
-// A Decoder reads and decodes AdvRef values from an input stream.
-type Decoder struct {
- s *pktline.Scanner // a pkt-line scanner from the input stream
- line []byte // current pkt-line contents, use parser.nextLine() to make it advance
- nLine int // current pkt-line number for debugging, begins at 1
- hash core.Hash // last hash read
- err error // sticky error, use the parser.error() method to fill this out
- data *AdvRefs // parsed data is stored here
-}
-
-// ErrEmpty is returned by Decode when there was no advertised-message at all
-var ErrEmpty = errors.New("empty advertised-ref message")
-
-// NewDecoder returns a new decoder that reads from r.
-//
-// Will not read more data from r than necessary.
-func NewDecoder(r io.Reader) *Decoder {
- return &Decoder{
- s: pktline.NewScanner(r),
- }
-}
-
-// Decode reads the next advertised-refs message form its input and
-// stores it in the value pointed to by v.
-func (d *Decoder) Decode(v *AdvRefs) error {
- d.data = v
-
- for state := decodePrefix; state != nil; {
- state = state(d)
- }
-
- return d.err
-}
-
-type decoderStateFn func(*Decoder) decoderStateFn
-
-// fills out the parser stiky error
-func (d *Decoder) error(format string, a ...interface{}) {
- d.err = fmt.Errorf("pkt-line %d: %s", d.nLine,
- fmt.Sprintf(format, a...))
-}
-
-// Reads a new pkt-line from the scanner, makes its payload available as
-// p.line and increments p.nLine. A successful invocation returns true,
-// otherwise, false is returned and the sticky error is filled out
-// accordingly. Trims eols at the end of the payloads.
-func (d *Decoder) nextLine() bool {
- d.nLine++
-
- if !d.s.Scan() {
- if d.err = d.s.Err(); d.err != nil {
- return false
- }
-
- if d.nLine == 1 {
- d.err = ErrEmpty
- return false
- }
-
- d.error("EOF")
- return false
- }
-
- d.line = d.s.Bytes()
- d.line = bytes.TrimSuffix(d.line, eol)
-
- return true
-}
-
-// The HTTP smart prefix is often followed by a flush-pkt.
-func decodePrefix(d *Decoder) decoderStateFn {
- if ok := d.nextLine(); !ok {
- return nil
- }
-
- if isPrefix(d.line) {
- tmp := make([]byte, len(d.line))
- copy(tmp, d.line)
- d.data.Prefix = append(d.data.Prefix, tmp)
- if ok := d.nextLine(); !ok {
- return nil
- }
- }
-
- if isFlush(d.line) {
- d.data.Prefix = append(d.data.Prefix, pktline.Flush)
- if ok := d.nextLine(); !ok {
- return nil
- }
- }
-
- return decodeFirstHash
-}
-
-func isPrefix(payload []byte) bool {
- return payload[0] == '#'
-}
-
-func isFlush(payload []byte) bool {
- return len(payload) == 0
-}
-
-// If the first hash is zero, then a no-refs is comming. Otherwise, a
-// list-of-refs is comming, and the hash will be followed by the first
-// advertised ref.
-func decodeFirstHash(p *Decoder) decoderStateFn {
- if len(p.line) < hashSize {
- p.error("cannot read hash, pkt-line too short")
- return nil
- }
-
- if _, err := hex.Decode(p.hash[:], p.line[:hashSize]); err != nil {
- p.error("invalid hash text: %s", err)
- return nil
- }
-
- p.line = p.line[hashSize:]
-
- if p.hash.IsZero() {
- return decodeSkipNoRefs
- }
-
- return decodeFirstRef
-}
-
-// Skips SP "capabilities^{}" NUL
-func decodeSkipNoRefs(p *Decoder) decoderStateFn {
- if len(p.line) < len(noHeadMark) {
- p.error("too short zero-id ref")
- return nil
- }
-
- if !bytes.HasPrefix(p.line, noHeadMark) {
- p.error("malformed zero-id ref")
- return nil
- }
-
- p.line = p.line[len(noHeadMark):]
-
- return decodeCaps
-}
-
-// decode the refname, expectes SP refname NULL
-func decodeFirstRef(l *Decoder) decoderStateFn {
- if len(l.line) < 3 {
- l.error("line too short after hash")
- return nil
- }
-
- if !bytes.HasPrefix(l.line, sp) {
- l.error("no space after hash")
- return nil
- }
- l.line = l.line[1:]
-
- chunks := bytes.SplitN(l.line, null, 2)
- if len(chunks) < 2 {
- l.error("NULL not found")
- return nil
- }
- ref := chunks[0]
- l.line = chunks[1]
-
- if bytes.Equal(ref, []byte(head)) {
- l.data.Head = &l.hash
- } else {
- l.data.References[string(ref)] = l.hash
- }
-
- return decodeCaps
-}
-
-func decodeCaps(p *Decoder) decoderStateFn {
- if len(p.line) == 0 {
- return decodeOtherRefs
- }
-
- for _, c := range bytes.Split(p.line, sp) {
- name, values := readCapability(c)
- p.data.Capabilities.Add(name, values...)
- }
-
- return decodeOtherRefs
-}
-
-// Capabilities are a single string or a name=value.
-// Even though we are only going to read at moust 1 value, we return
-// a slice of values, as Capability.Add receives that.
-func readCapability(data []byte) (name string, values []string) {
- pair := bytes.SplitN(data, []byte{'='}, 2)
- if len(pair) == 2 {
- values = append(values, string(pair[1]))
- }
-
- return string(pair[0]), values
-}
-
-// The refs are either tips (obj-id SP refname) or a peeled (obj-id SP refname^{}).
-// If there are no refs, then there might be a shallow or flush-ptk.
-func decodeOtherRefs(p *Decoder) decoderStateFn {
- if ok := p.nextLine(); !ok {
- return nil
- }
-
- if bytes.HasPrefix(p.line, shallow) {
- return decodeShallow
- }
-
- if len(p.line) == 0 {
- return nil
- }
-
- saveTo := p.data.References
- if bytes.HasSuffix(p.line, peeled) {
- p.line = bytes.TrimSuffix(p.line, peeled)
- saveTo = p.data.Peeled
- }
-
- ref, hash, err := readRef(p.line)
- if err != nil {
- p.error("%s", err)
- return nil
- }
- saveTo[ref] = hash
-
- return decodeOtherRefs
-}
-
-// Reads a ref-name
-func readRef(data []byte) (string, core.Hash, error) {
- chunks := bytes.Split(data, sp)
- switch {
- case len(chunks) == 1:
- return "", core.ZeroHash, fmt.Errorf("malformed ref data: no space was found")
- case len(chunks) > 2:
- return "", core.ZeroHash, fmt.Errorf("malformed ref data: more than one space found")
- default:
- return string(chunks[1]), core.NewHash(string(chunks[0])), nil
- }
-}
-
-// Keeps reading shallows until a flush-pkt is found
-func decodeShallow(p *Decoder) decoderStateFn {
- if !bytes.HasPrefix(p.line, shallow) {
- p.error("malformed shallow prefix, found %q... instead", p.line[:len(shallow)])
- return nil
- }
- p.line = bytes.TrimPrefix(p.line, shallow)
-
- if len(p.line) != hashSize {
- p.error(fmt.Sprintf(
- "malformed shallow hash: wrong length, expected 40 bytes, read %d bytes",
- len(p.line)))
- return nil
- }
-
- text := p.line[:hashSize]
- var h core.Hash
- if _, err := hex.Decode(h[:], text); err != nil {
- p.error("invalid hash text: %s", err)
- return nil
- }
-
- p.data.Shallows = append(p.data.Shallows, h)
-
- if ok := p.nextLine(); !ok {
- return nil
- }
-
- if len(p.line) == 0 {
- return nil // succesfull parse of the advertised-refs message
- }
-
- return decodeShallow
-}
diff --git a/formats/packp/advrefs/decoder_test.go b/formats/packp/advrefs/decoder_test.go
deleted file mode 100644
index ee2f5ae..0000000
--- a/formats/packp/advrefs/decoder_test.go
+++ /dev/null
@@ -1,500 +0,0 @@
-package advrefs_test
-
-import (
- "bytes"
- "io"
- "strings"
-
- "gopkg.in/src-d/go-git.v4/core"
- "gopkg.in/src-d/go-git.v4/formats/packp"
- "gopkg.in/src-d/go-git.v4/formats/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/formats/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
-type SuiteDecoder struct{}
-
-var _ = Suite(&SuiteDecoder{})
-
-func (s *SuiteDecoder) TestEmpty(c *C) {
- ar := advrefs.New()
- var buf bytes.Buffer
- d := advrefs.NewDecoder(&buf)
-
- err := d.Decode(ar)
- c.Assert(err, Equals, advrefs.ErrEmpty)
-}
-
-func (s *SuiteDecoder) TestShortForHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short")
-}
-
-func toPktLines(c *C, payloads []string) io.Reader {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil)
-
- return &buf
-}
-
-func testDecoderErrorMatches(c *C, input io.Reader, pattern string) {
- ar := advrefs.New()
- d := advrefs.NewDecoder(input)
-
- err := d.Decode(ar)
- c.Assert(err, ErrorMatches, pattern)
-}
-
-func (s *SuiteDecoder) TestInvalidFirstHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796alberto2219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid hash.*")
-}
-
-func (s *SuiteDecoder) TestZeroId(c *C) {
- payloads := []string{
- "0000000000000000000000000000000000000000 capabilities^{}\x00multi_ack thin-pack\n",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(ar.Head, IsNil)
-}
-
-func testDecodeOK(c *C, payloads []string) *advrefs.AdvRefs {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil)
-
- ar := advrefs.New()
- d := advrefs.NewDecoder(&buf)
-
- err = d.Decode(ar)
- c.Assert(err, IsNil)
-
- return ar
-}
-
-func (s *SuiteDecoder) TestMalformedZeroId(c *C) {
- payloads := []string{
- "0000000000000000000000000000000000000000 wrong\x00multi_ack thin-pack\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed zero-id.*")
-}
-
-func (s *SuiteDecoder) TestShortZeroId(c *C) {
- payloads := []string{
- "0000000000000000000000000000000000000000 capabi",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short zero-id.*")
-}
-
-func (s *SuiteDecoder) TestHead(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(*ar.Head, Equals,
- core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
-}
-
-func (s *SuiteDecoder) TestFirstIsNotHead(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 refs/heads/master\x00",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(ar.Head, IsNil)
- c.Assert(ar.References["refs/heads/master"], Equals,
- core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
-}
-
-func (s *SuiteDecoder) TestShortRef(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 H",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short.*")
-}
-
-func (s *SuiteDecoder) TestNoNULL(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEADofs-delta multi_ack",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*NULL not found.*")
-}
-
-func (s *SuiteDecoder) TestNoSpaceAfterHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5-HEAD\x00",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*no space after hash.*")
-}
-
-func (s *SuiteDecoder) TestNoCaps(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(ar.Capabilities.IsEmpty(), Equals, true)
-}
-
-func (s *SuiteDecoder) TestCaps(c *C) {
- for _, test := range [...]struct {
- input []string
- capabilities []packp.Capability
- }{
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{},
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{},
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {
- Name: "ofs-delta",
- Values: []string(nil),
- },
- },
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {Name: "ofs-delta", Values: []string(nil)},
- {Name: "multi_ack", Values: []string(nil)},
- },
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack\n",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {Name: "ofs-delta", Values: []string(nil)},
- {Name: "multi_ack", Values: []string(nil)},
- },
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar\n",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {Name: "symref", Values: []string{"HEAD:refs/heads/master"}},
- {Name: "agent", Values: []string{"foo=bar"}},
- },
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar agent=new-agent\n",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {Name: "symref", Values: []string{"HEAD:refs/heads/master"}},
- {Name: "agent", Values: []string{"foo=bar", "new-agent"}},
- },
- },
- } {
- ar := testDecodeOK(c, test.input)
- for _, fixCap := range test.capabilities {
- c.Assert(ar.Capabilities.Supports(fixCap.Name), Equals, true,
- Commentf("input = %q, capability = %q", test.input, fixCap.Name))
- c.Assert(ar.Capabilities.Get(fixCap.Name).Values, DeepEquals, fixCap.Values,
- Commentf("input = %q, capability = %q", test.input, fixCap.Name))
- }
- }
-}
-
-func (s *SuiteDecoder) TestWithPrefix(c *C) {
- payloads := []string{
- "# this is a prefix\n",
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00foo\n",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(len(ar.Prefix), Equals, 1)
- c.Assert(ar.Prefix[0], DeepEquals, []byte("# this is a prefix"))
-}
-
-func (s *SuiteDecoder) TestWithPrefixAndFlush(c *C) {
- payloads := []string{
- "# this is a prefix\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00foo\n",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(len(ar.Prefix), Equals, 2)
- c.Assert(ar.Prefix[0], DeepEquals, []byte("# this is a prefix"))
- c.Assert(ar.Prefix[1], DeepEquals, []byte(pktline.FlushString))
-}
-
-func (s *SuiteDecoder) TestOtherRefs(c *C) {
- for _, test := range [...]struct {
- input []string
- references map[string]core.Hash
- peeled map[string]core.Hash
- }{
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- pktline.FlushString,
- },
- references: make(map[string]core.Hash),
- peeled: make(map[string]core.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo",
- pktline.FlushString,
- },
- references: map[string]core.Hash{
- "ref/foo": core.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: make(map[string]core.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- pktline.FlushString,
- },
- references: map[string]core.Hash{
- "ref/foo": core.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: make(map[string]core.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- "2222222222222222222222222222222222222222 ref/bar",
- pktline.FlushString,
- },
- references: map[string]core.Hash{
- "ref/foo": core.NewHash("1111111111111111111111111111111111111111"),
- "ref/bar": core.NewHash("2222222222222222222222222222222222222222"),
- },
- peeled: make(map[string]core.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo^{}\n",
- pktline.FlushString,
- },
- references: make(map[string]core.Hash),
- peeled: map[string]core.Hash{
- "ref/foo": core.NewHash("1111111111111111111111111111111111111111"),
- },
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- "2222222222222222222222222222222222222222 ref/bar^{}",
- pktline.FlushString,
- },
- references: map[string]core.Hash{
- "ref/foo": core.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: map[string]core.Hash{
- "ref/bar": core.NewHash("2222222222222222222222222222222222222222"),
- },
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "51b8b4fb32271d39fbdd760397406177b2b0fd36 refs/pull/10/head\n",
- "02b5a6031ba7a8cbfde5d65ff9e13ecdbc4a92ca refs/pull/100/head\n",
- "c284c212704c43659bf5913656b8b28e32da1621 refs/pull/100/merge\n",
- "3d6537dce68c8b7874333a1720958bd8db3ae8ca refs/pull/101/merge\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{}\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- pktline.FlushString,
- },
- references: map[string]core.Hash{
- "refs/heads/master": core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
- "refs/pull/10/head": core.NewHash("51b8b4fb32271d39fbdd760397406177b2b0fd36"),
- "refs/pull/100/head": core.NewHash("02b5a6031ba7a8cbfde5d65ff9e13ecdbc4a92ca"),
- "refs/pull/100/merge": core.NewHash("c284c212704c43659bf5913656b8b28e32da1621"),
- "refs/pull/101/merge": core.NewHash("3d6537dce68c8b7874333a1720958bd8db3ae8ca"),
- "refs/tags/v2.6.11": core.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- "refs/tags/v2.6.11-tree": core.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- },
- peeled: map[string]core.Hash{
- "refs/tags/v2.6.11": core.NewHash("c39ae07f393806ccf406ef966e9a15afc43cc36a"),
- "refs/tags/v2.6.11-tree": core.NewHash("c39ae07f393806ccf406ef966e9a15afc43cc36a"),
- },
- },
- } {
- ar := testDecodeOK(c, test.input)
- comment := Commentf("input = %v\n", test.input)
- c.Assert(ar.References, DeepEquals, test.references, comment)
- c.Assert(ar.Peeled, DeepEquals, test.peeled, comment)
- }
-}
-
-func (s *SuiteDecoder) TestMalformedOtherRefsNoSpace(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8crefs/tags/v2.6.11\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed ref data.*")
-}
-
-func (s *SuiteDecoder) TestMalformedOtherRefsMultipleSpaces(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags v2.6.11\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed ref data.*")
-}
-
// TestShallow checks that "shallow" lines are decoded into the Shallows
// field — none, one, and several entries — preserved in input order.
func (s *SuiteDecoder) TestShallow(c *C) {
	for _, test := range [...]struct {
		input    []string    // pkt-line payloads fed to the decoder
		shallows []core.Hash // expected contents of AdvRefs.Shallows
	}{
		{
			// no shallow lines at all: Shallows decodes to an empty slice
			input: []string{
				"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
				"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
				"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
				"c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
				pktline.FlushString,
			},
			shallows: []core.Hash{},
		}, {
			// a single shallow line after the refs
			input: []string{
				"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
				"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
				"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
				"c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
				"shallow 1111111111111111111111111111111111111111\n",
				pktline.FlushString,
			},
			shallows: []core.Hash{core.NewHash("1111111111111111111111111111111111111111")},
		}, {
			// multiple shallow lines, kept in the order they appear
			input: []string{
				"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
				"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
				"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
				"c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
				"shallow 1111111111111111111111111111111111111111\n",
				"shallow 2222222222222222222222222222222222222222\n",
				pktline.FlushString,
			},
			shallows: []core.Hash{
				core.NewHash("1111111111111111111111111111111111111111"),
				core.NewHash("2222222222222222222222222222222222222222"),
			},
		},
	} {
		ar := testDecodeOK(c, test.input)
		comment := Commentf("input = %v\n", test.input)
		c.Assert(ar.Shallows, DeepEquals, test.shallows, comment)
	}
}
-
-func (s *SuiteDecoder) TestInvalidShallowHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 11111111alcortes111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid hash text.*")
-}
-
-func (s *SuiteDecoder) TestGarbageAfterShallow(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- "b5be40b90dbaa6bd337f3b77de361bfc0723468b refs/tags/v4.4",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed shallow prefix.*")
-}
-
-func (s *SuiteDecoder) TestMalformedShallowHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222 malformed\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed shallow hash.*")
-}
-
-func (s *SuiteDecoder) TestEOFRefs(c *C) {
- input := strings.NewReader("" +
- "005b6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n" +
- "003fa6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n" +
- "00355dc01c595e6c6ec9ccda4f6ffbf614e4d92bb0c7 refs/foo\n",
- )
- testDecoderErrorMatches(c, input, ".*invalid pkt-len.*")
-}
-
-func (s *SuiteDecoder) TestEOFShallows(c *C) {
- input := strings.NewReader("" +
- "005b6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n" +
- "003fa6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n" +
- "00445dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n" +
- "0047c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n" +
- "0035shallow 1111111111111111111111111111111111111111\n" +
- "0034shallow 222222222222222222222222")
- testDecoderErrorMatches(c, input, ".*unexpected EOF.*")
-}
diff --git a/formats/packp/advrefs/encoder.go b/formats/packp/advrefs/encoder.go
deleted file mode 100644
index 9874884..0000000
--- a/formats/packp/advrefs/encoder.go
+++ /dev/null
@@ -1,155 +0,0 @@
-package advrefs
-
-import (
- "bytes"
- "io"
- "sort"
-
- "gopkg.in/src-d/go-git.v4/core"
- "gopkg.in/src-d/go-git.v4/formats/packp"
- "gopkg.in/src-d/go-git.v4/formats/packp/pktline"
-)
-
// An Encoder writes AdvRefs values to an output stream. Use NewEncoder
// to create one; the first error found while encoding is kept in err
// and returned by Encode.
type Encoder struct {
	data *AdvRefs         // data to encode
	pe   *pktline.Encoder // where to write the encoded data
	err  error            // sticky error
}
-
-// NewEncoder returns a new encoder that writes to w.
-func NewEncoder(w io.Writer) *Encoder {
- return &Encoder{
- pe: pktline.NewEncoder(w),
- }
-}
-
-// Encode writes the AdvRefs encoding of v to the stream.
-//
-// All the payloads will end with a newline character. Capabilities,
-// references and shallows are writen in alphabetical order, except for
-// peeled references that always follow their corresponding references.
-func (e *Encoder) Encode(v *AdvRefs) error {
- e.data = v
-
- for state := encodePrefix; state != nil; {
- state = state(e)
- }
-
- return e.err
-}
-
// encoderStateFn is one state of the encoding state machine: it writes
// its part of the message and returns the next state, or nil when done
// (on error the state records it in e.err before returning nil).
type encoderStateFn func(*Encoder) encoderStateFn
-
-func encodePrefix(e *Encoder) encoderStateFn {
- for _, p := range e.data.Prefix {
- if bytes.Equal(p, pktline.Flush) {
- if e.err = e.pe.Flush(); e.err != nil {
- return nil
- }
- continue
- }
- if e.err = e.pe.Encodef("%s\n", string(p)); e.err != nil {
- return nil
- }
- }
-
- return encodeFirstLine
-}
-
-// Adds the first pkt-line payload: head hash, head ref and capabilities.
-// Also handle the special case when no HEAD ref is found.
-func encodeFirstLine(e *Encoder) encoderStateFn {
- head := formatHead(e.data.Head)
- separator := formatSeparator(e.data.Head)
- capabilities := formatCaps(e.data.Capabilities)
-
- if e.err = e.pe.Encodef("%s %s\x00%s\n", head, separator, capabilities); e.err != nil {
- return nil
- }
-
- return encodeRefs
-}
-
-func formatHead(h *core.Hash) string {
- if h == nil {
- return core.ZeroHash.String()
- }
-
- return h.String()
-}
-
-func formatSeparator(h *core.Hash) string {
- if h == nil {
- return noHead
- }
-
- return head
-}
-
-func formatCaps(c *packp.Capabilities) string {
- if c == nil {
- return ""
- }
-
- c.Sort()
-
- return c.String()
-}
-
-// Adds the (sorted) refs: hash SP refname EOL
-// and their peeled refs if any.
-func encodeRefs(e *Encoder) encoderStateFn {
- refs := sortRefs(e.data.References)
- for _, r := range refs {
- hash, _ := e.data.References[r]
- if e.err = e.pe.Encodef("%s %s\n", hash.String(), r); e.err != nil {
- return nil
- }
-
- if hash, ok := e.data.Peeled[r]; ok {
- if e.err = e.pe.Encodef("%s %s^{}\n", hash.String(), r); e.err != nil {
- return nil
- }
- }
- }
-
- return encodeShallow
-}
-
-func sortRefs(m map[string]core.Hash) []string {
- ret := make([]string, 0, len(m))
- for k := range m {
- ret = append(ret, k)
- }
- sort.Strings(ret)
-
- return ret
-}
-
-// Adds the (sorted) shallows: "shallow" SP hash EOL
-func encodeShallow(e *Encoder) encoderStateFn {
- sorted := sortShallows(e.data.Shallows)
- for _, hash := range sorted {
- if e.err = e.pe.Encodef("shallow %s\n", hash); e.err != nil {
- return nil
- }
- }
-
- return encodeFlush
-}
-
-func sortShallows(c []core.Hash) []string {
- ret := []string{}
- for _, h := range c {
- ret = append(ret, h.String())
- }
- sort.Strings(ret)
-
- return ret
-}
-
// encodeFlush terminates the message with a flush-pkt; it is the final
// state of the encoding state machine.
func encodeFlush(e *Encoder) encoderStateFn {
	e.err = e.pe.Flush()
	return nil
}
diff --git a/formats/packp/advrefs/encoder_test.go b/formats/packp/advrefs/encoder_test.go
deleted file mode 100644
index 8fb475b..0000000
--- a/formats/packp/advrefs/encoder_test.go
+++ /dev/null
@@ -1,249 +0,0 @@
-package advrefs_test
-
-import (
- "bytes"
- "strings"
-
- "gopkg.in/src-d/go-git.v4/core"
- "gopkg.in/src-d/go-git.v4/formats/packp"
- "gopkg.in/src-d/go-git.v4/formats/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/formats/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
// SuiteEncoder groups the advrefs.Encoder tests.
type SuiteEncoder struct{}

// register the suite with gocheck
var _ = Suite(&SuiteEncoder{})
-
-// returns a byte slice with the pkt-lines for the given payloads.
-func pktlines(c *C, payloads ...[]byte) []byte {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.Encode(payloads...)
- c.Assert(err, IsNil, Commentf("building pktlines for %v\n", payloads))
-
- return buf.Bytes()
-}
-
-func testEncode(c *C, input *advrefs.AdvRefs, expected []byte) {
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
- err := e.Encode(input)
- c.Assert(err, IsNil)
- obtained := buf.Bytes()
-
- comment := Commentf("\nobtained = %s\nexpected = %s\n", string(obtained), string(expected))
-
- c.Assert(obtained, DeepEquals, expected, comment)
-}
-
-func (s *SuiteEncoder) TestZeroValue(c *C) {
- ar := &advrefs.AdvRefs{}
-
- expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestHead(c *C) {
- hash := core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- ar := &advrefs.AdvRefs{
- Head: &hash,
- }
-
- expected := pktlines(c,
- []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestCapsNoHead(c *C) {
- capabilities := packp.NewCapabilities()
- capabilities.Add("symref", "HEAD:/refs/heads/master")
- capabilities.Add("ofs-delta")
- capabilities.Add("multi_ack")
- ar := &advrefs.AdvRefs{
- Capabilities: capabilities,
- }
-
- expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestCapsWithHead(c *C) {
- hash := core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- capabilities := packp.NewCapabilities()
- capabilities.Add("symref", "HEAD:/refs/heads/master")
- capabilities.Add("ofs-delta")
- capabilities.Add("multi_ack")
- ar := &advrefs.AdvRefs{
- Head: &hash,
- Capabilities: capabilities,
- }
-
- expected := pktlines(c,
- []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
// TestRefs checks that references are written sorted by ref name,
// regardless of the map's (random) iteration order.
func (s *SuiteEncoder) TestRefs(c *C) {
	references := map[string]core.Hash{
		"refs/heads/master":      core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
		"refs/tags/v2.6.12-tree": core.NewHash("1111111111111111111111111111111111111111"),
		"refs/tags/v2.7.13-tree": core.NewHash("3333333333333333333333333333333333333333"),
		"refs/tags/v2.6.13-tree": core.NewHash("2222222222222222222222222222222222222222"),
		"refs/tags/v2.6.11-tree": core.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
	}
	ar := &advrefs.AdvRefs{
		References: references,
	}

	// expected lines appear in alphabetical order of ref name
	expected := pktlines(c,
		[]byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
		[]byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
		[]byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
		[]byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
		[]byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
		[]byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
		pktline.Flush,
	)

	testEncode(c, ar, expected)
}
-
// TestPeeled checks that a peeled reference ("name^{}") is written
// right after its base reference, keeping the overall name ordering.
func (s *SuiteEncoder) TestPeeled(c *C) {
	references := map[string]core.Hash{
		"refs/heads/master":      core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
		"refs/tags/v2.6.12-tree": core.NewHash("1111111111111111111111111111111111111111"),
		"refs/tags/v2.7.13-tree": core.NewHash("3333333333333333333333333333333333333333"),
		"refs/tags/v2.6.13-tree": core.NewHash("2222222222222222222222222222222222222222"),
		"refs/tags/v2.6.11-tree": core.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
	}
	// only two of the refs above have peeled counterparts
	peeled := map[string]core.Hash{
		"refs/tags/v2.7.13-tree": core.NewHash("4444444444444444444444444444444444444444"),
		"refs/tags/v2.6.12-tree": core.NewHash("5555555555555555555555555555555555555555"),
	}
	ar := &advrefs.AdvRefs{
		References: references,
		Peeled:     peeled,
	}

	// each "^{}" line immediately follows its base ref in sorted output
	expected := pktlines(c,
		[]byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
		[]byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
		[]byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
		[]byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
		[]byte("5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n"),
		[]byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
		[]byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
		[]byte("4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n"),
		pktline.Flush,
	)

	testEncode(c, ar, expected)
}
-
-func (s *SuiteEncoder) TestShallow(c *C) {
- shallows := []core.Hash{
- core.NewHash("1111111111111111111111111111111111111111"),
- core.NewHash("4444444444444444444444444444444444444444"),
- core.NewHash("3333333333333333333333333333333333333333"),
- core.NewHash("2222222222222222222222222222222222222222"),
- }
- ar := &advrefs.AdvRefs{
- Shallows: shallows,
- }
-
- expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- []byte("shallow 1111111111111111111111111111111111111111\n"),
- []byte("shallow 2222222222222222222222222222222222222222\n"),
- []byte("shallow 3333333333333333333333333333333333333333\n"),
- []byte("shallow 4444444444444444444444444444444444444444\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
// TestAll checks the encoding of an AdvRefs with every field populated:
// head, capabilities, references, peeled references and shallows.
func (s *SuiteEncoder) TestAll(c *C) {
	hash := core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")

	capabilities := packp.NewCapabilities()
	capabilities.Add("symref", "HEAD:/refs/heads/master")
	capabilities.Add("ofs-delta")
	capabilities.Add("multi_ack")

	references := map[string]core.Hash{
		"refs/heads/master":      core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
		"refs/tags/v2.6.12-tree": core.NewHash("1111111111111111111111111111111111111111"),
		"refs/tags/v2.7.13-tree": core.NewHash("3333333333333333333333333333333333333333"),
		"refs/tags/v2.6.13-tree": core.NewHash("2222222222222222222222222222222222222222"),
		"refs/tags/v2.6.11-tree": core.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
	}

	peeled := map[string]core.Hash{
		"refs/tags/v2.7.13-tree": core.NewHash("4444444444444444444444444444444444444444"),
		"refs/tags/v2.6.12-tree": core.NewHash("5555555555555555555555555555555555555555"),
	}

	// deliberately unsorted: the encoder must sort them
	shallows := []core.Hash{
		core.NewHash("1111111111111111111111111111111111111111"),
		core.NewHash("4444444444444444444444444444444444444444"),
		core.NewHash("3333333333333333333333333333333333333333"),
		core.NewHash("2222222222222222222222222222222222222222"),
	}

	ar := &advrefs.AdvRefs{
		Head:         &hash,
		Capabilities: capabilities,
		References:   references,
		Peeled:       peeled,
		Shallows:     shallows,
	}

	// first line, then sorted refs with peeled lines interleaved,
	// then sorted shallows, then the flush-pkt
	expected := pktlines(c,
		[]byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
		[]byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
		[]byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
		[]byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
		[]byte("5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n"),
		[]byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
		[]byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
		[]byte("4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n"),
		[]byte("shallow 1111111111111111111111111111111111111111\n"),
		[]byte("shallow 2222222222222222222222222222222222222222\n"),
		[]byte("shallow 3333333333333333333333333333333333333333\n"),
		[]byte("shallow 4444444444444444444444444444444444444444\n"),
		pktline.Flush,
	)

	testEncode(c, ar, expected)
}
-
-func (s *SuiteEncoder) TestErrorTooLong(c *C) {
- references := map[string]core.Hash{
- strings.Repeat("a", pktline.MaxPayloadSize): core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
- }
- ar := &advrefs.AdvRefs{
- References: references,
- }
-
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
- err := e.Encode(ar)
- c.Assert(err, ErrorMatches, ".*payload is too long.*")
-}