author     Santiago M. Mola <santi@mola.io>            2016-11-25 15:48:20 +0100
committer  Máximo Cuadros <mcuadros@gmail.com>         2016-11-25 15:48:20 +0100
commit     f9adb3565b36ba1573102f954d0ee916009efac2 (patch)
tree       abc5b98e61b5851a985a215f7265ce2e9eb131f7 /plumbing/format
parent     c1e0932c6cbcc55a78f338d437b9f13d89f33552 (diff)
download   go-git-f9adb3565b36ba1573102f954d0ee916009efac2.tar.gz
move: format/packp -> protocol/packp (#141)
* move: format/packp -> protocol/packp
* format/packp -> protocol/packp
* format/packp/pktline -> format/pktline.
* move: protocol/packp/ulreq/* -> protocol/packp/*
* protocol/packp: rename UlReq types to make them unique.
* protocol/packp: namespace UlReq encoder.
* protocol/packp: namespace UlReq decoder.
* protocol/packp: fix example names
* move: protocol/packp/advrefs/* -> protocol/packp/*
* further ulreq namespacing
* protocol/packp: namespace AdvRefs types.
Diffstat (limited to 'plumbing/format')
-rw-r--r--  plumbing/format/packp/advrefs/advrefs.go         58
-rw-r--r--  plumbing/format/packp/advrefs/advrefs_test.go   315
-rw-r--r--  plumbing/format/packp/advrefs/decoder.go        300
-rw-r--r--  plumbing/format/packp/advrefs/decoder_test.go   526
-rw-r--r--  plumbing/format/packp/advrefs/encoder.go        155
-rw-r--r--  plumbing/format/packp/advrefs/encoder_test.go   249
-rw-r--r--  plumbing/format/packp/capabilities.go           136
-rw-r--r--  plumbing/format/packp/capabilities_test.go       46
-rw-r--r--  plumbing/format/packp/doc.go                    724
-rw-r--r--  plumbing/format/packp/ulreq/decoder.go          287
-rw-r--r--  plumbing/format/packp/ulreq/decoder_test.go     541
-rw-r--r--  plumbing/format/packp/ulreq/encoder.go          140
-rw-r--r--  plumbing/format/packp/ulreq/encoder_test.go     268
-rw-r--r--  plumbing/format/packp/ulreq/ulreq.go             56
-rw-r--r--  plumbing/format/packp/ulreq/ulreq_test.go        91
-rw-r--r--  plumbing/format/pktline/encoder.go (renamed from plumbing/format/packp/pktline/encoder.go)        0
-rw-r--r--  plumbing/format/pktline/encoder_test.go (renamed from plumbing/format/packp/pktline/encoder_test.go)   2
-rw-r--r--  plumbing/format/pktline/scanner.go (renamed from plumbing/format/packp/pktline/scanner.go)        0
-rw-r--r--  plumbing/format/pktline/scanner_test.go (renamed from plumbing/format/packp/pktline/scanner_test.go)   2
19 files changed, 2 insertions, 3894 deletions
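
For code that imported these packages, the move implies new import paths. A minimal before/after sketch, with the post-move paths inferred from the commit message (treat the exact new layout as an assumption):

// Before this commit:
import (
	"gopkg.in/src-d/go-git.v4/plumbing/format/packp"
	"gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
	"gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
)

// After this commit (inferred from the commit message):
import (
	"gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
)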
diff --git a/plumbing/format/packp/advrefs/advrefs.go b/plumbing/format/packp/advrefs/advrefs.go
deleted file mode 100644
index 4d7c897..0000000
--- a/plumbing/format/packp/advrefs/advrefs.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Package advrefs implements encoding and decoding of advertised-refs
-// messages from a git-upload-pack command.
-package advrefs
-
-import (
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
-)
-
-const (
- hashSize = 40
- head = "HEAD"
- noHead = "capabilities^{}"
-)
-
-var (
- sp = []byte(" ")
- null = []byte("\x00")
- eol = []byte("\n")
- peeled = []byte("^{}")
- shallow = []byte("shallow ")
- noHeadMark = []byte(" capabilities^{}\x00")
-)
-
-// AdvRefs values represent the information transmitted in an
-// advertised-refs message. Values of this type are not zero-value
-// safe; use the New function instead.
-//
-// When using these messages over (smart) HTTP, you have to add a
-// pkt-line before the whole thing with the following payload:
-//
-//     "# service=$servicename" LF
-//
-// Moreover, most (if not all) git smart HTTP servers will send a
-// flush-pkt just after the first pkt-line.
-//
-// To accommodate both situations, the Prefix field allows you to store
-// any data you want to send before the actual pkt-lines. When decoding,
-// it is filled with whatever is found before the first reference.
-type AdvRefs struct {
- Prefix [][]byte // payloads of the prefix
- Head *plumbing.Hash
- Capabilities *packp.Capabilities
- References map[string]plumbing.Hash
- Peeled map[string]plumbing.Hash
- Shallows []plumbing.Hash
-}
-
-// New returns a pointer to a new AdvRefs value, ready to be used.
-func New() *AdvRefs {
- return &AdvRefs{
- Prefix: [][]byte{},
- Capabilities: packp.NewCapabilities(),
- References: make(map[string]plumbing.Hash),
- Peeled: make(map[string]plumbing.Hash),
- Shallows: []plumbing.Hash{},
- }
-}
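
To illustrate the Prefix field described in the doc comment above, here is a minimal, non-authoritative sketch that reproduces the smart HTTP preamble by hand, using the pre-move import paths shown in this diff (error handling elided):

package main

import (
	"bytes"
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
	"gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
)

func main() {
	ar := advrefs.New()

	// Store the smart HTTP preamble in Prefix: the service pkt-line,
	// followed by the flush-pkt that most servers send right after it.
	ar.Prefix = [][]byte{
		[]byte("# service=git-upload-pack"),
		pktline.Flush,
	}

	var buf bytes.Buffer
	_ = advrefs.NewEncoder(&buf).Encode(ar) // error check elided for brevity

	// The output starts with the prefix, then the usual zero-id first
	// line, since this AdvRefs has no HEAD and no references.
	fmt.Printf("%q\n", buf.String())
}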
diff --git a/plumbing/format/packp/advrefs/advrefs_test.go b/plumbing/format/packp/advrefs/advrefs_test.go
deleted file mode 100644
index 2639b6e..0000000
--- a/plumbing/format/packp/advrefs/advrefs_test.go
+++ /dev/null
@@ -1,315 +0,0 @@
-package advrefs_test
-
-import (
- "bytes"
- "fmt"
- "io"
- "strings"
- "testing"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
-func Test(t *testing.T) { TestingT(t) }
-
-type SuiteDecodeEncode struct{}
-
-var _ = Suite(&SuiteDecodeEncode{})
-
-func (s *SuiteDecodeEncode) test(c *C, in []string, exp []string) {
- var err error
- var input io.Reader
- {
- var buf bytes.Buffer
- p := pktline.NewEncoder(&buf)
- err = p.EncodeString(in...)
- c.Assert(err, IsNil)
- input = &buf
- }
-
- var expected []byte
- {
- var buf bytes.Buffer
- p := pktline.NewEncoder(&buf)
- err = p.EncodeString(exp...)
- c.Assert(err, IsNil)
-
- expected = buf.Bytes()
- }
-
- var obtained []byte
- {
- ar := advrefs.New()
- d := advrefs.NewDecoder(input)
- err = d.Decode(ar)
- c.Assert(err, IsNil)
-
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
- err := e.Encode(ar)
- c.Assert(err, IsNil)
-
- obtained = buf.Bytes()
- }
-
- c.Assert(obtained, DeepEquals, expected,
- Commentf("input = %v\nobtained = %q\nexpected = %q\n",
- in, string(obtained), string(expected)))
-}
-
-func (s *SuiteDecodeEncode) TestNoHead(c *C) {
- input := []string{
- "0000000000000000000000000000000000000000 capabilities^{}\x00",
- pktline.FlushString,
- }
-
- expected := []string{
- "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestNoHeadSmart(c *C) {
- input := []string{
- "# service=git-upload-pack\n",
- "0000000000000000000000000000000000000000 capabilities^{}\x00",
- pktline.FlushString,
- }
-
- expected := []string{
- "# service=git-upload-pack\n",
- "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestNoHeadSmartBug(c *C) {
- input := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestRefs(c *C) {
- input := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree",
- pktline.FlushString,
- }
-
- expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestPeeled(c *C) {
- input := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestAll(c *C) {
- input := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}",
- "shallow 1111111111111111111111111111111111111111",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestAllSmart(c *C) {
- input := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func (s *SuiteDecodeEncode) TestAllSmartBug(c *C) {
- input := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- expected := []string{
- "# service=git-upload-pack\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
- "8888888888888888888888888888888888888888 refs/tags/v2.6.12-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
-
- s.test(c, input, expected)
-}
-
-func ExampleDecoder_Decode() {
- // Here is a raw advertised-ref message.
- raw := "" +
- "0065a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n" +
- "003fa6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n" +
- "00441111111111111111111111111111111111111111 refs/tags/v2.6.11-tree\n" +
- "00475555555555555555555555555555555555555555 refs/tags/v2.6.11-tree^{}\n" +
- "0035shallow 5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c\n" +
- "0000"
-
- // Use the raw message as our input.
- input := strings.NewReader(raw)
-
-	// Create an advrefs.Decoder reading from our input.
- d := advrefs.NewDecoder(input)
-
- // Decode the input into a newly allocated AdvRefs value.
- ar := advrefs.New()
- _ = d.Decode(ar) // error check ignored for brevity
-
- // Do something interesting with the AdvRefs, e.g. print its contents.
- fmt.Println("head =", ar.Head)
- fmt.Println("capabilities =", ar.Capabilities.String())
- fmt.Println("...")
- fmt.Println("shallows =", ar.Shallows)
- // Output: head = a6930aaee06755d1bdcfd943fbf614e4d92bb0c7
- // capabilities = multi_ack ofs-delta symref=HEAD:/refs/heads/master
- // ...
- // shallows = [5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c]
-}
-
-func ExampleEncoder_Encode() {
- // Create an AdvRefs with the contents you want...
- ar := advrefs.New()
-
- // ...add a hash for the HEAD...
- head := plumbing.NewHash("1111111111111111111111111111111111111111")
- ar.Head = &head
-
- // ...add some server capabilities...
- ar.Capabilities.Add("symref", "HEAD:/refs/heads/master")
- ar.Capabilities.Add("ofs-delta")
- ar.Capabilities.Add("multi_ack")
-
- // ...add a couple of references...
- ar.References["refs/heads/master"] = plumbing.NewHash("2222222222222222222222222222222222222222")
- ar.References["refs/tags/v1"] = plumbing.NewHash("3333333333333333333333333333333333333333")
-
- // ...including a peeled ref...
- ar.Peeled["refs/tags/v1"] = plumbing.NewHash("4444444444444444444444444444444444444444")
-
- // ...and finally add a shallow
- ar.Shallows = append(ar.Shallows, plumbing.NewHash("5555555555555555555555555555555555555555"))
-
-	// Encode the AdvRefs to a bytes.Buffer.
-	// You can encode into stdout too, but you will not be able
-	// to see the '\x00' after "HEAD".
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
- _ = e.Encode(ar) // error checks ignored for brevity
-
- // Print the contents of the buffer as a quoted string.
-	// Printing it as a non-quoted string would be prettier, but you
-	// would miss the '\x00' after "HEAD".
- fmt.Printf("%q", buf.String())
- // Output:
- // "00651111111111111111111111111111111111111111 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n003f2222222222222222222222222222222222222222 refs/heads/master\n003a3333333333333333333333333333333333333333 refs/tags/v1\n003d4444444444444444444444444444444444444444 refs/tags/v1^{}\n0035shallow 5555555555555555555555555555555555555555\n0000"
-}
diff --git a/plumbing/format/packp/advrefs/decoder.go b/plumbing/format/packp/advrefs/decoder.go
deleted file mode 100644
index c50eeef..0000000
--- a/plumbing/format/packp/advrefs/decoder.go
+++ /dev/null
@@ -1,300 +0,0 @@
-package advrefs
-
-import (
- "bytes"
- "encoding/hex"
- "errors"
- "fmt"
- "io"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-)
-
-// A Decoder reads and decodes AdvRef values from an input stream.
-type Decoder struct {
- s *pktline.Scanner // a pkt-line scanner from the input stream
- line []byte // current pkt-line contents; use nextLine() to advance
- nLine int // current pkt-line number for debugging, begins at 1
- hash plumbing.Hash // last hash read
- err error // sticky error; use the error() method to fill it out
- data *AdvRefs // parsed data is stored here
-}
-
-// ErrEmpty is returned by Decode when there was no advertised-refs message at all.
-var ErrEmpty = errors.New("empty advertised-ref message")
-
-// NewDecoder returns a new decoder that reads from r.
-//
-// Will not read more data from r than necessary.
-func NewDecoder(r io.Reader) *Decoder {
- return &Decoder{
- s: pktline.NewScanner(r),
- }
-}
-
-// Decode reads the next advertised-refs message from its input and
-// stores it in the value pointed to by v.
-func (d *Decoder) Decode(v *AdvRefs) error {
- d.data = v
-
- for state := decodePrefix; state != nil; {
- state = state(d)
- }
-
- return d.err
-}
-
-type decoderStateFn func(*Decoder) decoderStateFn
-
-// fills out the decoder's sticky error
-func (d *Decoder) error(format string, a ...interface{}) {
- d.err = fmt.Errorf("pkt-line %d: %s", d.nLine,
- fmt.Sprintf(format, a...))
-}
-
-// Reads a new pkt-line from the scanner, makes its payload available as
-// d.line, and increments d.nLine. A successful invocation returns true;
-// otherwise, false is returned and the sticky error is filled out
-// accordingly. Trims EOLs from the end of the payloads.
-func (d *Decoder) nextLine() bool {
- d.nLine++
-
- if !d.s.Scan() {
- if d.err = d.s.Err(); d.err != nil {
- return false
- }
-
- if d.nLine == 1 {
- d.err = ErrEmpty
- return false
- }
-
- d.error("EOF")
- return false
- }
-
- d.line = d.s.Bytes()
- d.line = bytes.TrimSuffix(d.line, eol)
-
- return true
-}
-
-// The HTTP smart prefix is often followed by a flush-pkt.
-func decodePrefix(d *Decoder) decoderStateFn {
- if ok := d.nextLine(); !ok {
- return nil
- }
-
- // If the repository is empty, we receive a flush here (SSH).
- if isFlush(d.line) {
- d.err = ErrEmpty
- return nil
- }
-
- if isPrefix(d.line) {
- tmp := make([]byte, len(d.line))
- copy(tmp, d.line)
- d.data.Prefix = append(d.data.Prefix, tmp)
- if ok := d.nextLine(); !ok {
- return nil
- }
- }
-
- if isFlush(d.line) {
- d.data.Prefix = append(d.data.Prefix, pktline.Flush)
- if ok := d.nextLine(); !ok {
- return nil
- }
- }
-
- return decodeFirstHash
-}
-
-func isPrefix(payload []byte) bool {
- return payload[0] == '#'
-}
-
-func isFlush(payload []byte) bool {
- return len(payload) == 0
-}
-
-// If the first hash is zero, then a no-refs message is coming. Otherwise,
-// a list-of-refs is coming, and the hash will be followed by the first
-// advertised ref.
-func decodeFirstHash(p *Decoder) decoderStateFn {
- // If the repository is empty, we receive a flush here (HTTP).
- if isFlush(p.line) {
- p.err = ErrEmpty
- return nil
- }
-
- if len(p.line) < hashSize {
- p.error("cannot read hash, pkt-line too short")
- return nil
- }
-
- if _, err := hex.Decode(p.hash[:], p.line[:hashSize]); err != nil {
- p.error("invalid hash text: %s", err)
- return nil
- }
-
- p.line = p.line[hashSize:]
-
- if p.hash.IsZero() {
- return decodeSkipNoRefs
- }
-
- return decodeFirstRef
-}
-
-// Skips SP "capabilities^{}" NUL
-func decodeSkipNoRefs(p *Decoder) decoderStateFn {
- if len(p.line) < len(noHeadMark) {
- p.error("too short zero-id ref")
- return nil
- }
-
- if !bytes.HasPrefix(p.line, noHeadMark) {
- p.error("malformed zero-id ref")
- return nil
- }
-
- p.line = p.line[len(noHeadMark):]
-
- return decodeCaps
-}
-
-// Decodes the refname; expects SP refname NUL
-func decodeFirstRef(l *Decoder) decoderStateFn {
- if len(l.line) < 3 {
- l.error("line too short after hash")
- return nil
- }
-
- if !bytes.HasPrefix(l.line, sp) {
- l.error("no space after hash")
- return nil
- }
- l.line = l.line[1:]
-
- chunks := bytes.SplitN(l.line, null, 2)
- if len(chunks) < 2 {
- l.error("NULL not found")
- return nil
- }
- ref := chunks[0]
- l.line = chunks[1]
-
- if bytes.Equal(ref, []byte(head)) {
- l.data.Head = &l.hash
- } else {
- l.data.References[string(ref)] = l.hash
- }
-
- return decodeCaps
-}
-
-func decodeCaps(p *Decoder) decoderStateFn {
- if len(p.line) == 0 {
- return decodeOtherRefs
- }
-
- for _, c := range bytes.Split(p.line, sp) {
- name, values := readCapability(c)
- p.data.Capabilities.Add(name, values...)
- }
-
- return decodeOtherRefs
-}
-
-// Capabilities are a single string or a name=value.
-// Even though we are only going to read at most one value, we return
-// a slice of values, as Capabilities.Add receives that.
-func readCapability(data []byte) (name string, values []string) {
- pair := bytes.SplitN(data, []byte{'='}, 2)
- if len(pair) == 2 {
- values = append(values, string(pair[1]))
- }
-
- return string(pair[0]), values
-}
-
-// The refs are either tips (obj-id SP refname) or peeled (obj-id SP refname^{}).
-// If there are no refs, then there might be a shallow or a flush-pkt.
-func decodeOtherRefs(p *Decoder) decoderStateFn {
- if ok := p.nextLine(); !ok {
- return nil
- }
-
- if bytes.HasPrefix(p.line, shallow) {
- return decodeShallow
- }
-
- if len(p.line) == 0 {
- return nil
- }
-
- saveTo := p.data.References
- if bytes.HasSuffix(p.line, peeled) {
- p.line = bytes.TrimSuffix(p.line, peeled)
- saveTo = p.data.Peeled
- }
-
- ref, hash, err := readRef(p.line)
- if err != nil {
- p.error("%s", err)
- return nil
- }
- saveTo[ref] = hash
-
- return decodeOtherRefs
-}
-
-// Reads a ref name and its hash from a pkt-line payload
-func readRef(data []byte) (string, plumbing.Hash, error) {
- chunks := bytes.Split(data, sp)
- switch {
- case len(chunks) == 1:
- return "", plumbing.ZeroHash, fmt.Errorf("malformed ref data: no space was found")
- case len(chunks) > 2:
- return "", plumbing.ZeroHash, fmt.Errorf("malformed ref data: more than one space found")
- default:
- return string(chunks[1]), plumbing.NewHash(string(chunks[0])), nil
- }
-}
-
-// Keeps reading shallows until a flush-pkt is found
-func decodeShallow(p *Decoder) decoderStateFn {
- if !bytes.HasPrefix(p.line, shallow) {
- p.error("malformed shallow prefix, found %q... instead", p.line[:len(shallow)])
- return nil
- }
- p.line = bytes.TrimPrefix(p.line, shallow)
-
- if len(p.line) != hashSize {
- p.error("malformed shallow hash: wrong length, expected 40 bytes, read %d bytes",
- len(p.line))
- return nil
- }
-
- text := p.line[:hashSize]
- var h plumbing.Hash
- if _, err := hex.Decode(h[:], text); err != nil {
- p.error("invalid hash text: %s", err)
- return nil
- }
-
- p.data.Shallows = append(p.data.Shallows, h)
-
- if ok := p.nextLine(); !ok {
- return nil
- }
-
- if len(p.line) == 0 {
- return nil // successful parse of the advertised-refs message
- }
-
- return decodeShallow
-}
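
The decoder above uses the state-function pattern: each decode* function parses one part of the message and returns the next state, nil terminates the loop, and any problem is recorded in the sticky error. A self-contained miniature of the same pattern, with illustrative names that are not part of go-git:

package main

import "fmt"

// stateFn mirrors decoderStateFn: a parsing state that returns the
// next state, or nil when parsing is done (or has failed).
type stateFn func(*machine) stateFn

type machine struct {
	input string
	pos   int
	words []string
}

// scanWord consumes one space-separated word and stays in the same
// state until the input is exhausted.
func scanWord(m *machine) stateFn {
	if m.pos >= len(m.input) {
		return nil // done, like a successful advertised-refs parse
	}

	end := m.pos
	for end < len(m.input) && m.input[end] != ' ' {
		end++
	}

	m.words = append(m.words, m.input[m.pos:end])
	m.pos = end + 1

	return scanWord
}

func main() {
	m := &machine{input: "HEAD refs/heads/master refs/tags/v1"}

	// The same driving loop used by Decoder.Decode.
	for state := scanWord; state != nil; {
		state = state(m)
	}

	fmt.Println(m.words) // [HEAD refs/heads/master refs/tags/v1]
}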
diff --git a/plumbing/format/packp/advrefs/decoder_test.go b/plumbing/format/packp/advrefs/decoder_test.go
deleted file mode 100644
index bacf79a..0000000
--- a/plumbing/format/packp/advrefs/decoder_test.go
+++ /dev/null
@@ -1,526 +0,0 @@
-package advrefs_test
-
-import (
- "bytes"
- "io"
- "strings"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
-type SuiteDecoder struct{}
-
-var _ = Suite(&SuiteDecoder{})
-
-func (s *SuiteDecoder) TestEmpty(c *C) {
- ar := advrefs.New()
- var buf bytes.Buffer
- d := advrefs.NewDecoder(&buf)
-
- err := d.Decode(ar)
- c.Assert(err, Equals, advrefs.ErrEmpty)
-}
-
-func (s *SuiteDecoder) TestEmptyFlush(c *C) {
- ar := advrefs.New()
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- e.Flush()
-
- d := advrefs.NewDecoder(&buf)
-
- err := d.Decode(ar)
- c.Assert(err, Equals, advrefs.ErrEmpty)
-}
-
-func (s *SuiteDecoder) TestEmptyPrefixFlush(c *C) {
- ar := advrefs.New()
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- e.EncodeString("# service=git-upload-pack")
- e.Flush()
- e.Flush()
-
- d := advrefs.NewDecoder(&buf)
-
- err := d.Decode(ar)
- c.Assert(err, Equals, advrefs.ErrEmpty)
-}
-
-func (s *SuiteDecoder) TestShortForHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short")
-}
-
-func toPktLines(c *C, payloads []string) io.Reader {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil)
-
- return &buf
-}
-
-func testDecoderErrorMatches(c *C, input io.Reader, pattern string) {
- ar := advrefs.New()
- d := advrefs.NewDecoder(input)
-
- err := d.Decode(ar)
- c.Assert(err, ErrorMatches, pattern)
-}
-
-func (s *SuiteDecoder) TestInvalidFirstHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796alberto2219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid hash.*")
-}
-
-func (s *SuiteDecoder) TestZeroId(c *C) {
- payloads := []string{
- "0000000000000000000000000000000000000000 capabilities^{}\x00multi_ack thin-pack\n",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(ar.Head, IsNil)
-}
-
-func testDecodeOK(c *C, payloads []string) *advrefs.AdvRefs {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil)
-
- ar := advrefs.New()
- d := advrefs.NewDecoder(&buf)
-
- err = d.Decode(ar)
- c.Assert(err, IsNil)
-
- return ar
-}
-
-func (s *SuiteDecoder) TestMalformedZeroId(c *C) {
- payloads := []string{
- "0000000000000000000000000000000000000000 wrong\x00multi_ack thin-pack\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed zero-id.*")
-}
-
-func (s *SuiteDecoder) TestShortZeroId(c *C) {
- payloads := []string{
- "0000000000000000000000000000000000000000 capabi",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short zero-id.*")
-}
-
-func (s *SuiteDecoder) TestHead(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(*ar.Head, Equals,
- plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
-}
-
-func (s *SuiteDecoder) TestFirstIsNotHead(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 refs/heads/master\x00",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(ar.Head, IsNil)
- c.Assert(ar.References["refs/heads/master"], Equals,
- plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
-}
-
-func (s *SuiteDecoder) TestShortRef(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 H",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short.*")
-}
-
-func (s *SuiteDecoder) TestNoNULL(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEADofs-delta multi_ack",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*NULL not found.*")
-}
-
-func (s *SuiteDecoder) TestNoSpaceAfterHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5-HEAD\x00",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*no space after hash.*")
-}
-
-func (s *SuiteDecoder) TestNoCaps(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(ar.Capabilities.IsEmpty(), Equals, true)
-}
-
-func (s *SuiteDecoder) TestCaps(c *C) {
- for _, test := range [...]struct {
- input []string
- capabilities []packp.Capability
- }{
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{},
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{},
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {
- Name: "ofs-delta",
- Values: []string(nil),
- },
- },
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {Name: "ofs-delta", Values: []string(nil)},
- {Name: "multi_ack", Values: []string(nil)},
- },
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack\n",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {Name: "ofs-delta", Values: []string(nil)},
- {Name: "multi_ack", Values: []string(nil)},
- },
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar\n",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {Name: "symref", Values: []string{"HEAD:refs/heads/master"}},
- {Name: "agent", Values: []string{"foo=bar"}},
- },
- },
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar agent=new-agent\n",
- pktline.FlushString,
- },
- capabilities: []packp.Capability{
- {Name: "symref", Values: []string{"HEAD:refs/heads/master"}},
- {Name: "agent", Values: []string{"foo=bar", "new-agent"}},
- },
- },
- } {
- ar := testDecodeOK(c, test.input)
- for _, fixCap := range test.capabilities {
- c.Assert(ar.Capabilities.Supports(fixCap.Name), Equals, true,
- Commentf("input = %q, capability = %q", test.input, fixCap.Name))
- c.Assert(ar.Capabilities.Get(fixCap.Name).Values, DeepEquals, fixCap.Values,
- Commentf("input = %q, capability = %q", test.input, fixCap.Name))
- }
- }
-}
-
-func (s *SuiteDecoder) TestWithPrefix(c *C) {
- payloads := []string{
- "# this is a prefix\n",
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00foo\n",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(len(ar.Prefix), Equals, 1)
- c.Assert(ar.Prefix[0], DeepEquals, []byte("# this is a prefix"))
-}
-
-func (s *SuiteDecoder) TestWithPrefixAndFlush(c *C) {
- payloads := []string{
- "# this is a prefix\n",
- pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00foo\n",
- pktline.FlushString,
- }
- ar := testDecodeOK(c, payloads)
- c.Assert(len(ar.Prefix), Equals, 2)
- c.Assert(ar.Prefix[0], DeepEquals, []byte("# this is a prefix"))
- c.Assert(ar.Prefix[1], DeepEquals, []byte(pktline.FlushString))
-}
-
-func (s *SuiteDecoder) TestOtherRefs(c *C) {
- for _, test := range [...]struct {
- input []string
- references map[string]plumbing.Hash
- peeled map[string]plumbing.Hash
- }{
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- pktline.FlushString,
- },
- references: make(map[string]plumbing.Hash),
- peeled: make(map[string]plumbing.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: make(map[string]plumbing.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: make(map[string]plumbing.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- "2222222222222222222222222222222222222222 ref/bar",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- "ref/bar": plumbing.NewHash("2222222222222222222222222222222222222222"),
- },
- peeled: make(map[string]plumbing.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo^{}\n",
- pktline.FlushString,
- },
- references: make(map[string]plumbing.Hash),
- peeled: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- },
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- "2222222222222222222222222222222222222222 ref/bar^{}",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: map[string]plumbing.Hash{
- "ref/bar": plumbing.NewHash("2222222222222222222222222222222222222222"),
- },
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "51b8b4fb32271d39fbdd760397406177b2b0fd36 refs/pull/10/head\n",
- "02b5a6031ba7a8cbfde5d65ff9e13ecdbc4a92ca refs/pull/100/head\n",
- "c284c212704c43659bf5913656b8b28e32da1621 refs/pull/100/merge\n",
- "3d6537dce68c8b7874333a1720958bd8db3ae8ca refs/pull/101/merge\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{}\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
- "refs/pull/10/head": plumbing.NewHash("51b8b4fb32271d39fbdd760397406177b2b0fd36"),
- "refs/pull/100/head": plumbing.NewHash("02b5a6031ba7a8cbfde5d65ff9e13ecdbc4a92ca"),
- "refs/pull/100/merge": plumbing.NewHash("c284c212704c43659bf5913656b8b28e32da1621"),
- "refs/pull/101/merge": plumbing.NewHash("3d6537dce68c8b7874333a1720958bd8db3ae8ca"),
- "refs/tags/v2.6.11": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- "refs/tags/v2.6.11-tree": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- },
- peeled: map[string]plumbing.Hash{
- "refs/tags/v2.6.11": plumbing.NewHash("c39ae07f393806ccf406ef966e9a15afc43cc36a"),
- "refs/tags/v2.6.11-tree": plumbing.NewHash("c39ae07f393806ccf406ef966e9a15afc43cc36a"),
- },
- },
- } {
- ar := testDecodeOK(c, test.input)
- comment := Commentf("input = %v\n", test.input)
- c.Assert(ar.References, DeepEquals, test.references, comment)
- c.Assert(ar.Peeled, DeepEquals, test.peeled, comment)
- }
-}
-
-func (s *SuiteDecoder) TestMalformedOtherRefsNoSpace(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8crefs/tags/v2.6.11\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed ref data.*")
-}
-
-func (s *SuiteDecoder) TestMalformedOtherRefsMultipleSpaces(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags v2.6.11\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed ref data.*")
-}
-
-func (s *SuiteDecoder) TestShallow(c *C) {
- for _, test := range [...]struct {
- input []string
- shallows []plumbing.Hash
- }{
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- pktline.FlushString,
- },
- shallows: []plumbing.Hash{},
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- pktline.FlushString,
- },
- shallows: []plumbing.Hash{plumbing.NewHash("1111111111111111111111111111111111111111")},
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- },
- shallows: []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- },
- },
- } {
- ar := testDecodeOK(c, test.input)
- comment := Commentf("input = %v\n", test.input)
- c.Assert(ar.Shallows, DeepEquals, test.shallows, comment)
- }
-}
-
-func (s *SuiteDecoder) TestInvalidShallowHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 11111111alcortes111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid hash text.*")
-}
-
-func (s *SuiteDecoder) TestGarbageAfterShallow(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- "b5be40b90dbaa6bd337f3b77de361bfc0723468b refs/tags/v4.4",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed shallow prefix.*")
-}
-
-func (s *SuiteDecoder) TestMalformedShallowHash(c *C) {
- payloads := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222 malformed\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed shallow hash.*")
-}
-
-func (s *SuiteDecoder) TestEOFRefs(c *C) {
- input := strings.NewReader("" +
- "005b6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n" +
- "003fa6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n" +
- "00355dc01c595e6c6ec9ccda4f6ffbf614e4d92bb0c7 refs/foo\n",
- )
- testDecoderErrorMatches(c, input, ".*invalid pkt-len.*")
-}
-
-func (s *SuiteDecoder) TestEOFShallows(c *C) {
- input := strings.NewReader("" +
- "005b6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n" +
- "003fa6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n" +
- "00445dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n" +
- "0047c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n" +
- "0035shallow 1111111111111111111111111111111111111111\n" +
- "0034shallow 222222222222222222222222")
- testDecoderErrorMatches(c, input, ".*unexpected EOF.*")
-}
diff --git a/plumbing/format/packp/advrefs/encoder.go b/plumbing/format/packp/advrefs/encoder.go
deleted file mode 100644
index 8c52f14..0000000
--- a/plumbing/format/packp/advrefs/encoder.go
+++ /dev/null
@@ -1,155 +0,0 @@
-package advrefs
-
-import (
- "bytes"
- "io"
- "sort"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-)
-
-// An Encoder writes AdvRefs values to an output stream.
-type Encoder struct {
- data *AdvRefs // data to encode
- pe *pktline.Encoder // where to write the encoded data
- err error // sticky error
-}
-
-// NewEncoder returns a new encoder that writes to w.
-func NewEncoder(w io.Writer) *Encoder {
- return &Encoder{
- pe: pktline.NewEncoder(w),
- }
-}
-
-// Encode writes the AdvRefs encoding of v to the stream.
-//
-// All the payloads will end with a newline character. Capabilities,
-// references and shallows are written in alphabetical order, except for
-// peeled references, which always follow their corresponding references.
-func (e *Encoder) Encode(v *AdvRefs) error {
- e.data = v
-
- for state := encodePrefix; state != nil; {
- state = state(e)
- }
-
- return e.err
-}
-
-type encoderStateFn func(*Encoder) encoderStateFn
-
-func encodePrefix(e *Encoder) encoderStateFn {
- for _, p := range e.data.Prefix {
- if bytes.Equal(p, pktline.Flush) {
- if e.err = e.pe.Flush(); e.err != nil {
- return nil
- }
- continue
- }
- if e.err = e.pe.Encodef("%s\n", string(p)); e.err != nil {
- return nil
- }
- }
-
- return encodeFirstLine
-}
-
-// Adds the first pkt-line payload: head hash, head ref and capabilities.
-// Also handles the special case when no HEAD ref is found.
-func encodeFirstLine(e *Encoder) encoderStateFn {
- head := formatHead(e.data.Head)
- separator := formatSeparator(e.data.Head)
- capabilities := formatCaps(e.data.Capabilities)
-
- if e.err = e.pe.Encodef("%s %s\x00%s\n", head, separator, capabilities); e.err != nil {
- return nil
- }
-
- return encodeRefs
-}
-
-func formatHead(h *plumbing.Hash) string {
- if h == nil {
- return plumbing.ZeroHash.String()
- }
-
- return h.String()
-}
-
-func formatSeparator(h *plumbing.Hash) string {
- if h == nil {
- return noHead
- }
-
- return head
-}
-
-func formatCaps(c *packp.Capabilities) string {
- if c == nil {
- return ""
- }
-
- c.Sort()
-
- return c.String()
-}
-
-// Adds the (sorted) refs: hash SP refname EOL
-// and their peeled refs if any.
-func encodeRefs(e *Encoder) encoderStateFn {
- refs := sortRefs(e.data.References)
- for _, r := range refs {
- hash := e.data.References[r]
- if e.err = e.pe.Encodef("%s %s\n", hash.String(), r); e.err != nil {
- return nil
- }
-
- if hash, ok := e.data.Peeled[r]; ok {
- if e.err = e.pe.Encodef("%s %s^{}\n", hash.String(), r); e.err != nil {
- return nil
- }
- }
- }
-
- return encodeShallow
-}
-
-func sortRefs(m map[string]plumbing.Hash) []string {
- ret := make([]string, 0, len(m))
- for k := range m {
- ret = append(ret, k)
- }
- sort.Strings(ret)
-
- return ret
-}
-
-// Adds the (sorted) shallows: "shallow" SP hash EOL
-func encodeShallow(e *Encoder) encoderStateFn {
- sorted := sortShallows(e.data.Shallows)
- for _, hash := range sorted {
- if e.err = e.pe.Encodef("shallow %s\n", hash); e.err != nil {
- return nil
- }
- }
-
- return encodeFlush
-}
-
-func sortShallows(c []plumbing.Hash) []string {
- ret := []string{}
- for _, h := range c {
- ret = append(ret, h.String())
- }
- sort.Strings(ret)
-
- return ret
-}
-
-func encodeFlush(e *Encoder) encoderStateFn {
- e.err = e.pe.Flush()
- return nil
-}
diff --git a/plumbing/format/packp/advrefs/encoder_test.go b/plumbing/format/packp/advrefs/encoder_test.go
deleted file mode 100644
index b4b085c..0000000
--- a/plumbing/format/packp/advrefs/encoder_test.go
+++ /dev/null
@@ -1,249 +0,0 @@
-package advrefs_test
-
-import (
- "bytes"
- "strings"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
-type SuiteEncoder struct{}
-
-var _ = Suite(&SuiteEncoder{})
-
-// returns a byte slice with the pkt-lines for the given payloads.
-func pktlines(c *C, payloads ...[]byte) []byte {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.Encode(payloads...)
- c.Assert(err, IsNil, Commentf("building pktlines for %v\n", payloads))
-
- return buf.Bytes()
-}
-
-func testEncode(c *C, input *advrefs.AdvRefs, expected []byte) {
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
- err := e.Encode(input)
- c.Assert(err, IsNil)
- obtained := buf.Bytes()
-
- comment := Commentf("\nobtained = %s\nexpected = %s\n", string(obtained), string(expected))
-
- c.Assert(obtained, DeepEquals, expected, comment)
-}
-
-func (s *SuiteEncoder) TestZeroValue(c *C) {
- ar := &advrefs.AdvRefs{}
-
- expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestHead(c *C) {
- hash := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- ar := &advrefs.AdvRefs{
- Head: &hash,
- }
-
- expected := pktlines(c,
- []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestCapsNoHead(c *C) {
- capabilities := packp.NewCapabilities()
- capabilities.Add("symref", "HEAD:/refs/heads/master")
- capabilities.Add("ofs-delta")
- capabilities.Add("multi_ack")
- ar := &advrefs.AdvRefs{
- Capabilities: capabilities,
- }
-
- expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestCapsWithHead(c *C) {
- hash := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- capabilities := packp.NewCapabilities()
- capabilities.Add("symref", "HEAD:/refs/heads/master")
- capabilities.Add("ofs-delta")
- capabilities.Add("multi_ack")
- ar := &advrefs.AdvRefs{
- Head: &hash,
- Capabilities: capabilities,
- }
-
- expected := pktlines(c,
- []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestRefs(c *C) {
- references := map[string]plumbing.Hash{
- "refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
- "refs/tags/v2.6.12-tree": plumbing.NewHash("1111111111111111111111111111111111111111"),
- "refs/tags/v2.7.13-tree": plumbing.NewHash("3333333333333333333333333333333333333333"),
- "refs/tags/v2.6.13-tree": plumbing.NewHash("2222222222222222222222222222222222222222"),
- "refs/tags/v2.6.11-tree": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- }
- ar := &advrefs.AdvRefs{
- References: references,
- }
-
- expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
- []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
- []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
- []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
- []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestPeeled(c *C) {
- references := map[string]plumbing.Hash{
- "refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
- "refs/tags/v2.6.12-tree": plumbing.NewHash("1111111111111111111111111111111111111111"),
- "refs/tags/v2.7.13-tree": plumbing.NewHash("3333333333333333333333333333333333333333"),
- "refs/tags/v2.6.13-tree": plumbing.NewHash("2222222222222222222222222222222222222222"),
- "refs/tags/v2.6.11-tree": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- }
- peeled := map[string]plumbing.Hash{
- "refs/tags/v2.7.13-tree": plumbing.NewHash("4444444444444444444444444444444444444444"),
- "refs/tags/v2.6.12-tree": plumbing.NewHash("5555555555555555555555555555555555555555"),
- }
- ar := &advrefs.AdvRefs{
- References: references,
- Peeled: peeled,
- }
-
- expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
- []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
- []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
- []byte("5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n"),
- []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
- []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
- []byte("4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestShallow(c *C) {
- shallows := []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("4444444444444444444444444444444444444444"),
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- }
- ar := &advrefs.AdvRefs{
- Shallows: shallows,
- }
-
- expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- []byte("shallow 1111111111111111111111111111111111111111\n"),
- []byte("shallow 2222222222222222222222222222222222222222\n"),
- []byte("shallow 3333333333333333333333333333333333333333\n"),
- []byte("shallow 4444444444444444444444444444444444444444\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestAll(c *C) {
- hash := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
-
- capabilities := packp.NewCapabilities()
- capabilities.Add("symref", "HEAD:/refs/heads/master")
- capabilities.Add("ofs-delta")
- capabilities.Add("multi_ack")
-
- references := map[string]plumbing.Hash{
- "refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
- "refs/tags/v2.6.12-tree": plumbing.NewHash("1111111111111111111111111111111111111111"),
- "refs/tags/v2.7.13-tree": plumbing.NewHash("3333333333333333333333333333333333333333"),
- "refs/tags/v2.6.13-tree": plumbing.NewHash("2222222222222222222222222222222222222222"),
- "refs/tags/v2.6.11-tree": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- }
-
- peeled := map[string]plumbing.Hash{
- "refs/tags/v2.7.13-tree": plumbing.NewHash("4444444444444444444444444444444444444444"),
- "refs/tags/v2.6.12-tree": plumbing.NewHash("5555555555555555555555555555555555555555"),
- }
-
- shallows := []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("4444444444444444444444444444444444444444"),
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- }
-
- ar := &advrefs.AdvRefs{
- Head: &hash,
- Capabilities: capabilities,
- References: references,
- Peeled: peeled,
- Shallows: shallows,
- }
-
- expected := pktlines(c,
- []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
- []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
- []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
- []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
- []byte("5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n"),
- []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
- []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
- []byte("4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n"),
- []byte("shallow 1111111111111111111111111111111111111111\n"),
- []byte("shallow 2222222222222222222222222222222222222222\n"),
- []byte("shallow 3333333333333333333333333333333333333333\n"),
- []byte("shallow 4444444444444444444444444444444444444444\n"),
- pktline.Flush,
- )
-
- testEncode(c, ar, expected)
-}
-
-func (s *SuiteEncoder) TestErrorTooLong(c *C) {
- references := map[string]plumbing.Hash{
- strings.Repeat("a", pktline.MaxPayloadSize): plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
- }
- ar := &advrefs.AdvRefs{
- References: references,
- }
-
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
- err := e.Encode(ar)
- c.Assert(err, ErrorMatches, ".*payload is too long.*")
-}
diff --git a/plumbing/format/packp/capabilities.go b/plumbing/format/packp/capabilities.go
deleted file mode 100644
index d77c2fa..0000000
--- a/plumbing/format/packp/capabilities.go
+++ /dev/null
@@ -1,136 +0,0 @@
-package packp
-
-import (
- "fmt"
- "sort"
- "strings"
-)
-
-// Capabilities contains all the server capabilities
-// https://github.com/git/git/blob/master/Documentation/technical/protocol-capabilities.txt
-type Capabilities struct {
- m map[string]*Capability
- o []string
-}
-
-// Capability represents a server capability
-type Capability struct {
- Name string
- Values []string
-}
-
-// NewCapabilities returns a new Capabilities struct
-func NewCapabilities() *Capabilities {
- return &Capabilities{
- m: make(map[string]*Capability),
- }
-}
-
-// IsEmpty returns true if there are no capabilities.
-func (c *Capabilities) IsEmpty() bool {
- return len(c.o) == 0
-}
-
-// Decode parses a space-separated capability list, as found on the wire
-func (c *Capabilities) Decode(raw string) {
- params := strings.Split(raw, " ")
- for _, p := range params {
- s := strings.SplitN(p, "=", 2)
-
- var value string
- if len(s) == 2 {
- value = s[1]
- }
-
- c.Add(s[0], value)
- }
-}
-
-// Get returns the named capability, or nil if it is not present
-func (c *Capabilities) Get(capability string) *Capability {
- return c.m[capability]
-}
-
-// Set sets a capability, replacing any previously existing values
-func (c *Capabilities) Set(capability string, values ...string) {
-	if old, ok := c.m[capability]; ok {
-		// Reset the values but keep the capability in the ordering
-		// slice, so the Add call below does not duplicate it there.
-		old.Values = nil
-	}
-
-	c.Add(capability, values...)
-}
-
-// Add adds a capability, values are optional
-func (c *Capabilities) Add(capability string, values ...string) {
- if !c.Supports(capability) {
- c.m[capability] = &Capability{Name: capability}
- c.o = append(c.o, capability)
- }
-
- if len(values) == 0 {
- return
- }
-
- c.m[capability].Values = append(c.m[capability].Values, values...)
-}
-
-// Supports returns true if capability is present
-func (c *Capabilities) Supports(capability string) bool {
- _, ok := c.m[capability]
- return ok
-}
-
-// SymbolicReference returns the target of the given symbolic reference
-// (for example, HEAD), or an empty string if it is not advertised
-func (c *Capabilities) SymbolicReference(sym string) string {
- if !c.Supports("symref") {
- return ""
- }
-
- for _, symref := range c.Get("symref").Values {
- parts := strings.Split(symref, ":")
- if len(parts) != 2 {
- continue
- }
-
- if parts[0] == sym {
- return parts[1]
- }
- }
-
- return ""
-}
-
-// Sort sorts the capabilities in increasing order by name
-func (c *Capabilities) Sort() {
- sort.Strings(c.o)
-}
-
-func (c *Capabilities) String() string {
- if len(c.o) == 0 {
- return ""
- }
-
- var o string
- for _, key := range c.o {
- cap := c.m[key]
-
- added := false
- for _, value := range cap.Values {
- if value == "" {
- continue
- }
-
- added = true
- o += fmt.Sprintf("%s=%s ", key, value)
- }
-
- if len(cap.Values) == 0 || !added {
- o += key + " "
- }
- }
-
- if len(o) == 0 {
- return o
- }
-
- return o[:len(o)-1]
-}
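
A minimal usage sketch for Capabilities, showing how a decoded capability list is typically queried (the capability values are examples, and the import path is the pre-move one shown in this diff):

package main

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing/format/packp"
)

func main() {
	caps := packp.NewCapabilities()

	// Parse a capability list as advertised by a server.
	caps.Decode("multi_ack ofs-delta symref=HEAD:refs/heads/master")

	fmt.Println(caps.Supports("ofs-delta"))     // true
	fmt.Println(caps.SymbolicReference("HEAD")) // refs/heads/master
	fmt.Println(caps.SymbolicReference("foo"))  // empty: not advertised
}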
diff --git a/plumbing/format/packp/capabilities_test.go b/plumbing/format/packp/capabilities_test.go
deleted file mode 100644
index e42a0c7..0000000
--- a/plumbing/format/packp/capabilities_test.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package packp
-
-import (
- "testing"
-
- . "gopkg.in/check.v1"
-)
-
-func Test(t *testing.T) { TestingT(t) }
-
-type SuiteCapabilities struct{}
-
-var _ = Suite(&SuiteCapabilities{})
-
-func (s *SuiteCapabilities) TestDecode(c *C) {
- cap := NewCapabilities()
- cap.Decode("symref=foo symref=qux thin-pack")
-
- c.Assert(cap.m, HasLen, 2)
- c.Assert(cap.Get("symref").Values, DeepEquals, []string{"foo", "qux"})
- c.Assert(cap.Get("thin-pack").Values, DeepEquals, []string{""})
-}
-
-func (s *SuiteCapabilities) TestSet(c *C) {
- cap := NewCapabilities()
- cap.Add("symref", "foo", "qux")
- cap.Set("symref", "bar")
-
- c.Assert(cap.m, HasLen, 1)
- c.Assert(cap.Get("symref").Values, DeepEquals, []string{"bar"})
-}
-
-func (s *SuiteCapabilities) TestSetEmpty(c *C) {
- cap := NewCapabilities()
- cap.Set("foo", "bar")
-
- c.Assert(cap.Get("foo").Values, HasLen, 1)
-}
-
-func (s *SuiteCapabilities) TestAdd(c *C) {
- cap := NewCapabilities()
- cap.Add("symref", "foo", "qux")
- cap.Add("thin-pack")
-
- c.Assert(cap.String(), Equals, "symref=foo symref=qux thin-pack")
-}
diff --git a/plumbing/format/packp/doc.go b/plumbing/format/packp/doc.go
deleted file mode 100644
index 4950d1d..0000000
--- a/plumbing/format/packp/doc.go
+++ /dev/null
@@ -1,724 +0,0 @@
-package packp
-
-/*
-
-To trace the real data transmitted and received by git, use:
-
-GIT_TRACE_PACKET=true git ls-remote http://github.com/src-d/go-git
-GIT_TRACE_PACKET=true git clone http://github.com/src-d/go-git
-
-Here follows a copy of the current protocol specification at the time of
-this writing.
-
-(Note that most HTTP git servers will add a flush-pkt after the
-first pkt-line when using smart HTTP.)
-
-
-Documentation Common to Pack and Http Protocols
-===============================================
-
-ABNF Notation
--------------
-
-ABNF notation as described by RFC 5234 is used within the protocol documents,
-except the following replacement core rules are used:
-----
- HEXDIG = DIGIT / "a" / "b" / "c" / "d" / "e" / "f"
-----
-
-We also define the following common rules:
-----
- NUL = %x00
- zero-id = 40*"0"
- obj-id = 40*(HEXDIG)
-
- refname = "HEAD"
- refname /= "refs/" <see discussion below>
-----
-
-A refname is a hierarchical octet string beginning with "refs/" and
-not violating the 'git-check-ref-format' command's validation rules.
-More specifically, they:
-
-. They can include slash `/` for hierarchical (directory)
- grouping, but no slash-separated component can begin with a
- dot `.`.
-
-. They must contain at least one `/`. This enforces the presence of a
- category like `heads/`, `tags/` etc. but the actual names are not
- restricted.
-
-. They cannot have two consecutive dots `..` anywhere.
-
-. They cannot have ASCII control characters (i.e. bytes whose
- values are lower than \040, or \177 `DEL`), space, tilde `~`,
- caret `^`, colon `:`, question-mark `?`, asterisk `*`,
- or open bracket `[` anywhere.
-
-. They cannot end with a slash `/` or a dot `.`.
-
-. They cannot end with the sequence `.lock`.
-
-. They cannot contain a sequence `@{`.
-
-. They cannot contain a `\\`.
-
-
-pkt-line Format
----------------
-
-Much (but not all) of the payload is described around pkt-lines.
-
-A pkt-line is a variable length binary string. The first four bytes
-of the line, the pkt-len, indicates the total length of the line,
-in hexadecimal. The pkt-len includes the 4 bytes used to contain
-the length's hexadecimal representation.
-
-A pkt-line MAY contain binary data, so implementors MUST ensure
-pkt-line parsing/formatting routines are 8-bit clean.
-
-A non-binary line SHOULD BE terminated by an LF, which if present
-MUST be included in the total length. Receivers MUST treat pkt-lines
-with non-binary data the same whether or not they contain the trailing
-LF (stripping the LF if present, and not complaining when it is
-missing).
-
-The maximum length of a pkt-line's data component is 65516 bytes.
-Implementations MUST NOT send a pkt-line whose length exceeds 65520
-(65516 bytes of payload + 4 bytes of length data).
-
-Implementations SHOULD NOT send an empty pkt-line ("0004").
-
-A pkt-line with a length field of 0 ("0000"), called a flush-pkt,
-is a special case and MUST be handled differently than an empty
-pkt-line ("0004").
-
-----
- pkt-line = data-pkt / flush-pkt
-
- data-pkt = pkt-len pkt-payload
- pkt-len = 4*(HEXDIG)
- pkt-payload = (pkt-len - 4)*(OCTET)
-
- flush-pkt = "0000"
-----
-
-Examples (as C-style strings):
-
-----
- pkt-line actual value
- ---------------------------------
- "0006a\n" "a\n"
- "0005a" "a"
- "000bfoobar\n" "foobar\n"
- "0004" ""
-----
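-
-As a sketch in Go (function name ours; illustrative only), the pkt-len
-is the payload length plus the four length digits themselves, encoded
-as lowercase hex:
-
-----
-    func pktLine(payload string) string {
-        return fmt.Sprintf("%04x%s", len(payload)+4, payload)
-    }
-
-    pktLine("a\n") // "0006a\n"
-----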
-
-Packfile transfer protocols
-===========================
-
-Git supports transferring data in packfiles over the ssh://, git://, http:// and
-file:// transports. There exist two sets of protocols, one for pushing
-data from a client to a server and another for fetching data from a
-server to a client. The three transports (ssh, git, file) use the same
-protocol to transfer data. http is documented in http-protocol.txt.
-
-The processes invoked in the canonical Git implementation are 'upload-pack'
-on the server side and 'fetch-pack' on the client side for fetching data;
-then 'receive-pack' on the server and 'send-pack' on the client for pushing
-data. The protocol functions to have a server tell a client what is
-currently on the server, then for the two to negotiate the smallest amount
-of data to send in order to fully update one or the other.
-
-pkt-line Format
----------------
-
-The descriptions below build on the pkt-line format described in
-protocol-common.txt. When the grammar indicates `PKT-LINE(...)`, unless
-otherwise noted the usual pkt-line LF rules apply: the sender SHOULD
-include a LF, but the receiver MUST NOT complain if it is not present.
-
-Transports
-----------
-There are three transports over which the packfile protocol is
-initiated. The Git transport is a simple, unauthenticated server that
-takes the command (almost always 'upload-pack', though Git
-servers can be configured to be globally writable, in which case
-'receive-pack' initiation is also allowed) with which the client
-wishes to communicate, executes it, and connects it to the
-requesting process.
-
-In the SSH transport, the client just runs the 'upload-pack'
-or 'receive-pack' process on the server over the SSH protocol and then
-communicates with that invoked process over the SSH connection.
-
-The file:// transport runs the 'upload-pack' or 'receive-pack'
-process locally and communicates with it over a pipe.
-
-Git Transport
--------------
-
-The Git transport starts off by sending the command and repository
-on the wire using the pkt-line format, followed by a NUL byte and a
-hostname parameter, terminated by a NUL byte.
-
- 0032git-upload-pack /project.git\0host=myserver.com\0
-
---
- git-proto-request = request-command SP pathname NUL [ host-parameter NUL ]
- request-command = "git-upload-pack" / "git-receive-pack" /
- "git-upload-archive" ; case sensitive
- pathname = *( %x01-ff ) ; exclude NUL
- host-parameter = "host=" hostname [ ":" port ]
---
-
-Only host-parameter is allowed in the git-proto-request. Clients
-MUST NOT attempt to send additional parameters. It is used for the
-git-daemon name-based virtual hosting. See the --interpolated-path
-option to git daemon, with the %H/%CH format characters.
-
-Basically what the Git client is doing to connect to an 'upload-pack'
-process on the server side over the Git protocol is this:
-
- $ echo -e -n \
- "0039git-upload-pack /schacon/gitbook.git\0host=example.com\0" |
- nc -v example.com 9418
-
-If the server refuses the request for some reason, it can abort
-gracefully with an error message.
-
-----
- error-line = PKT-LINE("ERR" SP explanation-text)
-----
-
-
-SSH Transport
--------------
-
-Initiating the upload-pack or receive-pack process over SSH means
-executing the binary on the server via SSH remote execution.
-It is basically equivalent to running this:
-
- $ ssh git.example.com "git-upload-pack '/project.git'"
-
-For a server to support Git pushing and pulling for a given user over
-SSH, that user needs to be able to execute one or both of those
-commands via the SSH shell that they are provided on login. On some
-systems, that shell access is limited to only being able to run those
-two commands, or even just one of them.
-
-In an ssh:// format URI, the path is absolute in the URI, so the '/'
-after the host name (or port number) is sent as an argument, which is
-then read by the remote git-upload-pack exactly as is, so it's
-effectively an absolute path in the remote filesystem.
-
- git clone ssh://user@example.com/project.git
- |
- v
- ssh user@example.com "git-upload-pack '/project.git'"
-
-In a "user@host:path" format URI, its relative to the user's home
-directory, because the Git client will run:
-
- git clone user@example.com:project.git
- |
- v
- ssh user@example.com "git-upload-pack 'project.git'"
-
-The exception is if a '~' is used, in which case
-we execute it without the leading '/'.
-
- ssh://user@example.com/~alice/project.git
- |
- v
- ssh user@example.com "git-upload-pack '~alice/project.git'"
-
-A few things to remember here:
-
-- The "command name" is spelled with dash (e.g. git-upload-pack), but
- this can be overridden by the client;
-
-- The repository path is always quoted with single quotes.
-
-Fetching Data From a Server
----------------------------
-
-When one Git repository wants to get data that a second repository
-has, the first can 'fetch' from the second. This operation determines
-what data the server has that the client does not, then streams that
-data down to the client in packfile format.
-
-
-Reference Discovery
--------------------
-
-When the client initially connects, the server will immediately respond
-with a listing of each reference it has (all branches and tags) along
-with the object name that each reference currently points to.
-
- $ echo -e -n "0039git-upload-pack /schacon/gitbook.git\0host=example.com\0" |
- nc -v example.com 9418
- 00887217a7c7e582c46cec22a130adf4b9d7d950fba0 HEAD\0multi_ack thin-pack
- side-band side-band-64k ofs-delta shallow no-progress include-tag
- 00441d3fcd5ced445d1abc402225c0b8a1299641f497 refs/heads/integration
- 003f7217a7c7e582c46cec22a130adf4b9d7d950fba0 refs/heads/master
- 003cb88d2441cac0977faf98efc80305012112238d9d refs/tags/v0.9
- 003c525128480b96c89e6418b1e40909bf6c5b2d580f refs/tags/v1.0
- 003fe92df48743b7bc7d26bcaabfddde0a1e20cae47c refs/tags/v1.0^{}
- 0000
-
-The returned response is a pkt-line stream describing each ref and
-its current value. The stream MUST be sorted by name according to
-the C locale ordering.
-
-If HEAD is a valid ref, HEAD MUST appear as the first advertised
-ref. If HEAD is not a valid ref, HEAD MUST NOT appear in the
-advertisement list at all, but other refs may still appear.
-
-The stream MUST include capability declarations behind a NUL on the
-first ref. The peeled value of a ref (that is "ref^{}") MUST be
-immediately after the ref itself, if presented. A conforming server
-MUST peel the ref if it's an annotated tag.
-
-----
- advertised-refs = (no-refs / list-of-refs)
- *shallow
- flush-pkt
-
- no-refs = PKT-LINE(zero-id SP "capabilities^{}"
- NUL capability-list)
-
- list-of-refs = first-ref *other-ref
- first-ref = PKT-LINE(obj-id SP refname
- NUL capability-list)
-
- other-ref = PKT-LINE(other-tip / other-peeled)
- other-tip = obj-id SP refname
- other-peeled = obj-id SP refname "^{}"
-
- shallow = PKT-LINE("shallow" SP obj-id)
-
- capability-list = capability *(SP capability)
- capability = 1*(LC_ALPHA / DIGIT / "-" / "_")
- LC_ALPHA = %x61-7A
-----
-
-Server and client MUST use lowercase for obj-id; both MUST treat obj-id
-as case-insensitive.
-
-See protocol-capabilities.txt for a list of allowed server capabilities
-and descriptions.
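-
-A sketch in Go (function name ours; illustrative only) of splitting a
-first-ref payload into its ref part and its capability list, following
-the grammar above:
-
-----
-    func splitFirstRef(payload []byte) (ref string, caps []string) {
-        parts := bytes.SplitN(payload, []byte{0}, 2)
-        ref = string(parts[0])
-        if len(parts) == 2 {
-            caps = strings.Fields(string(parts[1]))
-        }
-        return ref, caps
-    }
-----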
-
-Packfile Negotiation
---------------------
-After reference and capabilities discovery, the client can decide to
-terminate the connection by sending a flush-pkt, telling the server it
-can now gracefully terminate and disconnect, when it does not need any
-pack data. This can happen with the ls-remote command, and also can
-happen when the client is already up-to-date.
-
-Otherwise, it enters the negotiation phase, where the client and
-server determine what the minimal packfile necessary for transport is,
-by telling the server what objects it wants, its shallow objects
-(if any), and the maximum commit depth it wants (if any). The client
-will also send a list of the capabilities it wants to be in effect,
-out of what the server said it could do with the first 'want' line.
-
-----
- upload-request = want-list
- *shallow-line
- *1depth-request
- flush-pkt
-
- want-list = first-want
- *additional-want
-
- shallow-line = PKT-LINE("shallow" SP obj-id)
-
- depth-request = PKT-LINE("deepen" SP depth) /
- PKT-LINE("deepen-since" SP timestamp) /
- PKT-LINE("deepen-not" SP ref)
-
- first-want = PKT-LINE("want" SP obj-id SP capability-list)
- additional-want = PKT-LINE("want" SP obj-id)
-
- depth = 1*DIGIT
-----
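-
-Sketched with this tree's pktline encoder (function name and the id
-parameter ours; illustrative only), a minimal upload-request is just
-these payloads followed by a flush-pkt:
-
-----
-    // id is a 40-char lowercase hex obj-id string.
-    func sendUploadRequest(w io.Writer, id string) error {
-        e := pktline.NewEncoder(w)
-        if err := e.Encodef("want %s side-band-64k ofs-delta\n", id); err != nil {
-            return err
-        }
-        if err := e.Encodef("deepen %d\n", 1); err != nil {
-            return err
-        }
-        return e.Flush()
-    }
-----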
-
-Clients MUST send all the obj-ids they want from the reference
-discovery phase as 'want' lines. Clients MUST send at least one
-'want' command in the request body. Clients MUST NOT mention an
-obj-id in a 'want' command which did not appear in the response
-obtained through ref discovery.
-
-The client MUST write all obj-ids which it only has shallow copies
-of (meaning that it does not have the parents of a commit) as
-'shallow' lines so that the server is aware of the limitations of
-the client's history.
-
-The client now sends the maximum commit history depth it wants for
-this transaction, which is the number of commits it wants from the
-tip of the history, if any, as a 'deepen' line. A depth of 0 is the
-same as not making a depth request. The client does not want to receive
-any commits beyond this depth, nor does it want objects needed only to
-complete those commits. Commits whose parents are not received as a
-result are defined as shallow and marked as such in the server. This
-information is sent back to the client in the next step.
-
-Once all the 'want's and 'shallow's (and optional 'deepen') are
-transferred, clients MUST send a flush-pkt, to tell the server side
-that it is done sending the list.
-
-If the client sent a positive depth request, the server will
-determine which commits will and will not be shallow and send this
-information to the client. If the client did not request a positive
-depth, this step is skipped.
-
-----
- shallow-update = *shallow-line
- *unshallow-line
- flush-pkt
-
- shallow-line = PKT-LINE("shallow" SP obj-id)
-
- unshallow-line = PKT-LINE("unshallow" SP obj-id)
-----
-
-If the client has requested a positive depth, the server will compute
-the set of commits which are no deeper than the desired depth. The set
-of commits starts at the client's wants.
-
-The server writes 'shallow' lines for each
-commit whose parents will not be sent as a result. The server writes
-an 'unshallow' line for each commit which the client has indicated is
-shallow, but is no longer shallow at the currently requested depth
-(that is, its parents will now be sent). The server MUST NOT mark
-as unshallow anything which the client has not indicated was shallow.
-
-Now the client will send a list of the obj-ids it has using 'have'
-lines, so the server can make a packfile that only contains the objects
-that the client needs. In multi_ack mode, the canonical implementation
-will send up to 32 of these at a time, then will send a flush-pkt. The
-canonical implementation will skip ahead and send the next 32 immediately,
-so that there is always a block of 32 "in-flight on the wire" at a time.
-
-----
- upload-haves = have-list
- compute-end
-
- have-list = *have-line
- have-line = PKT-LINE("have" SP obj-id)
- compute-end = flush-pkt / PKT-LINE("done")
-----
-
-If the server reads 'have' lines, it then will respond by ACKing any
-of the obj-ids the client said it had that the server also has. The
-server will ACK obj-ids differently depending on which ack mode is
-chosen by the client.
-
-In multi_ack mode:
-
- * the server will respond with 'ACK obj-id continue' for any common
- commits.
-
- * once the server has found an acceptable common base commit and is
- ready to make a packfile, it will blindly ACK all 'have' obj-ids
- back to the client.
-
- * the server will then send a 'NAK' and then wait for another response
- from the client - either a 'done' or another list of 'have' lines.
-
-In multi_ack_detailed mode:
-
- * the server will differentiate the ACKs where it is signaling
- that it is ready to send data with 'ACK obj-id ready' lines, and
- signals the identified common commits with 'ACK obj-id common' lines.
-
-Without either multi_ack or multi_ack_detailed:
-
- * upload-pack sends "ACK obj-id" on the first common object it finds.
- After that it says nothing until the client gives it a "done".
-
- * upload-pack sends "NAK" on a flush-pkt if no common object
- has been found yet. If one has been found, and thus an ACK
- was already sent, it's silent on the flush-pkt.
-
-After the client has gotten enough ACK responses that it can determine
-that the server has enough information to send an efficient packfile
-(in the canonical implementation, this is determined when it has received
-enough ACKs that it can color everything left in the --date-order queue
-as common with the server, or the --date-order queue is empty), or the
-client determines that it wants to give up (in the canonical implementation,
-this is determined when the client sends 256 'have' lines without getting
-any of them ACKed by the server - meaning there is nothing in common and
-the server should just send all of its objects), then the client will send
-a 'done' command. The 'done' command signals to the server that the client
-is ready to receive its packfile data.
-
-However, the 256 limit *only* turns on in the canonical client
-implementation if we have received at least one "ACK %s continue"
-during a prior round. This helps to ensure that at least one common
-ancestor is found before we give up entirely.
-
-Once the 'done' line is read from the client, the server will either
-send a final 'ACK obj-id' or it will send a 'NAK'. 'obj-id' is the object
-name of the last commit determined to be common. The server only sends
-ACK after 'done' if there is at least one common base and multi_ack or
-multi_ack_detailed is enabled. The server always sends NAK after 'done'
-if there is no common base found.
-
-Then the server will start sending its packfile data.
-
-----
- server-response = *ack_multi ack / nak
- ack_multi = PKT-LINE("ACK" SP obj-id ack_status)
- ack_status = "continue" / "common" / "ready"
- ack = PKT-LINE("ACK" SP obj-id)
- nak = PKT-LINE("NAK")
-----
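-
-Classifying a server-response payload, sketched in Go (function name
-ours; illustrative only):
-
-----
-    // classify reports what kind of server-response payload line is.
-    func classify(line []byte) string {
-        switch {
-        case bytes.Equal(line, []byte("NAK")):
-            return "nak" // no common base found yet
-        case bytes.HasPrefix(line, []byte("ACK ")):
-            return "ack" // obj-id follows; multi_ack modes may append a status
-        }
-        return "unknown"
-    }
-----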
-
-A simple clone may look like this (with no 'have' lines):
-
-----
- C: 0054want 74730d410fcb6603ace96f1dc55ea6196122532d multi_ack \
- side-band-64k ofs-delta\n
- C: 0032want 7d1665144a3a975c05f1f43902ddaf084e784dbe\n
- C: 0032want 5a3f6be755bbb7deae50065988cbfa1ffa9ab68a\n
- C: 0032want 7e47fe2bd8d01d481f44d7af0531bd93d3b21c01\n
- C: 0032want 74730d410fcb6603ace96f1dc55ea6196122532d\n
- C: 0000
- C: 0009done\n
-
- S: 0008NAK\n
- S: [PACKFILE]
-----
-
-An incremental update (fetch) response might look like this:
-
-----
- C: 0054want 74730d410fcb6603ace96f1dc55ea6196122532d multi_ack \
- side-band-64k ofs-delta\n
- C: 0032want 7d1665144a3a975c05f1f43902ddaf084e784dbe\n
- C: 0032want 5a3f6be755bbb7deae50065988cbfa1ffa9ab68a\n
- C: 0000
- C: 0032have 7e47fe2bd8d01d481f44d7af0531bd93d3b21c01\n
- C: [30 more have lines]
- C: 0032have 74730d410fcb6603ace96f1dc55ea6196122532d\n
- C: 0000
-
- S: 003aACK 7e47fe2bd8d01d481f44d7af0531bd93d3b21c01 continue\n
- S: 003aACK 74730d410fcb6603ace96f1dc55ea6196122532d continue\n
- S: 0008NAK\n
-
- C: 0009done\n
-
- S: 0031ACK 74730d410fcb6603ace96f1dc55ea6196122532d\n
- S: [PACKFILE]
-----
-
-
-Packfile Data
--------------
-
-Now that the client and server have finished negotiating the minimal
-amount of data that needs to be sent to the client, the server will
-construct and send the required data in packfile format.
-
-See pack-format.txt for what the packfile itself actually looks like.
-
-If 'side-band' or 'side-band-64k' capabilities have been specified by
-the client, the server will send the packfile data multiplexed.
-
-Each packet starts with the packet-line length of the amount of data
-that follows, followed by a single byte specifying the sideband the
-following data is coming in on.
-
-In 'side-band' mode, it will send up to 999 data bytes plus 1 control
-code, for a total of up to 1000 bytes in a pkt-line. In 'side-band-64k'
-mode it will send up to 65519 data bytes plus 1 control code, for a
-total of up to 65520 bytes in a pkt-line.
-
-The sideband byte will be a '1', '2' or a '3'. Sideband '1' will contain
-packfile data, sideband '2' will be used for progress information that the
-client will generally print to stderr and sideband '3' is used for error
-information.
-
-If no 'side-band' capability was specified, the server will stream the
-entire packfile without multiplexing.
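-
-Demultiplexing a side-band pkt-line payload is a one-byte dispatch; a
-sketch in Go (function name ours; illustrative only):
-
-----
-    func demux(payload []byte) (band byte, data []byte) {
-        // byte 0 selects the channel: 1 pack data, 2 progress, 3 error
-        return payload[0], payload[1:]
-    }
-----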
-
-
-Pushing Data To a Server
-------------------------
-
-Pushing data to a server will invoke the 'receive-pack' process on the
-server, which will allow the client to tell it which references it should
-update and then send all the data the server will need for those new
-references to be complete. Once all the data is received and validated,
-the server will then update its references to what the client specified.
-
-Authentication
---------------
-
-The protocol itself contains no authentication mechanisms. That is to be
-handled by the transport, such as SSH, before the 'receive-pack' process is
-invoked. If 'receive-pack' is configured over the Git transport, those
-repositories will be writable by anyone who can access that port (9418) as
-that transport is unauthenticated.
-
-Reference Discovery
--------------------
-
-The reference discovery phase is done nearly the same way as it is in the
-fetching protocol. Each reference obj-id and name on the server is sent
-in packet-line format to the client, followed by a flush-pkt. The only
-real difference is that the capability listing is different - the only
-possible values are 'report-status', 'delete-refs', 'ofs-delta' and
-'push-options'.
-
-Reference Update Request and Packfile Transfer
-----------------------------------------------
-
-Once the client knows what references the server is at, it can send a
-list of reference update requests. For each reference on the server
-that it wants to update, it sends a line listing the obj-id currently on
-the server, the obj-id the client would like to update it to and the name
-of the reference.
-
-This list is followed by a flush-pkt. Then the push options are transmitted
-one per packet followed by another flush-pkt. After that the packfile that
-should contain all the objects that the server will need to complete the new
-references will be sent.
-
-----
- update-request = *shallow ( command-list | push-cert ) [packfile]
-
- shallow = PKT-LINE("shallow" SP obj-id)
-
- command-list = PKT-LINE(command NUL capability-list)
- *PKT-LINE(command)
- flush-pkt
-
- command = create / delete / update
- create = zero-id SP new-id SP name
- delete = old-id SP zero-id SP name
- update = old-id SP new-id SP name
-
- old-id = obj-id
- new-id = obj-id
-
- push-cert = PKT-LINE("push-cert" NUL capability-list LF)
- PKT-LINE("certificate version 0.1" LF)
- PKT-LINE("pusher" SP ident LF)
- PKT-LINE("pushee" SP url LF)
- PKT-LINE("nonce" SP nonce LF)
- PKT-LINE(LF)
- *PKT-LINE(command LF)
- *PKT-LINE(gpg-signature-lines LF)
- PKT-LINE("push-cert-end" LF)
-
- packfile = "PACK" 28*(OCTET)
-----
-
-If the receiving end does not support delete-refs, the sending end MUST
-NOT ask for the delete command.
-
-If the receiving end does not support push-cert, the sending end
-MUST NOT send a push-cert command. When a push-cert command is
-sent, command-list MUST NOT be sent; the commands recorded in the
-push certificate are used instead.
-
-The packfile MUST NOT be sent if the only command used is 'delete'.
-
-A packfile MUST be sent if either create or update command is used,
-even if the server already has all the necessary objects. In this
-case the client MUST send an empty packfile. The only time this
-is likely to happen is if the client is creating
-a new branch or a tag that points to an existing obj-id.
-
-The server will receive the packfile, unpack it, then validate, for each
-reference being updated, that it hasn't changed while the request
-was being processed (the obj-id is still the same as the old-id), and
-it will run any update hooks to make sure that the update is acceptable.
-If all of that is fine, the server will then update the references.
-
-Push Certificate
-----------------
-
-A push certificate begins with a set of header lines. After the
-header and an empty line, the protocol commands follow, one per
-line. Note that the trailing LF in push-cert PKT-LINEs is _not_
-optional; it must be present.
-
-Currently, the following header fields are defined:
-
-`pusher` ident::
- Identify the GPG key in "Human Readable Name <email@address>"
- format.
-
-`pushee` url::
- The repository URL (anonymized, if the URL contains
- authentication material) the user who ran `git push`
- intended to push into.
-
-`nonce` nonce::
- The 'nonce' string the receiving repository asked the
- pushing user to include in the certificate, to prevent
- replay attacks.
-
-The GPG signature lines are a detached signature for the contents
-recorded in the push certificate before the signature block begins.
-The detached signature is used to certify that the commands were
-given by the pusher, who must be the signer.
-
-Report Status
--------------
-
-After receiving the pack data from the sender, the receiver sends a
-report if the 'report-status' capability is in effect.
-It is a short listing of what happened in that update. It will first
-list the status of the packfile unpacking as either 'unpack ok' or
-'unpack [error]'. Then it will list the status for each of the references
-that it tried to update. Each line is either 'ok [refname]' if the
-update was successful, or 'ng [refname] [error]' if the update was not.
-
-----
- report-status = unpack-status
- 1*(command-status)
- flush-pkt
-
- unpack-status = PKT-LINE("unpack" SP unpack-result)
- unpack-result = "ok" / error-msg
-
- command-status = command-ok / command-fail
- command-ok = PKT-LINE("ok" SP refname)
- command-fail = PKT-LINE("ng" SP refname SP error-msg)
-
- error-msg = 1*(OCTET) ; where not "ok"
-----
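-
-A minimal reading loop for this report, sketched with this tree's
-pktline scanner (function name ours; illustrative only):
-
-----
-    func readReportStatus(r io.Reader) {
-        s := pktline.NewScanner(r)
-        for s.Scan() {
-            line := string(s.Bytes())
-            switch {
-            case strings.HasPrefix(line, "unpack "):
-                // "unpack ok" or "unpack <error-msg>"
-            case strings.HasPrefix(line, "ok "), strings.HasPrefix(line, "ng "):
-                // per-reference command-status
-            }
-        }
-    }
-----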
-
-Updates can be unsuccessful for a number of reasons. The reference can have
-changed since the reference discovery phase was originally sent, meaning
-someone pushed in the meantime. The reference being pushed could be a
-non-fast-forward reference and the update hooks or configuration could be
-set to not allow that, etc. Also, some references can be updated while others
-can be rejected.
-
-An example client/server communication might look like this:
-
-----
- S: 007c74730d410fcb6603ace96f1dc55ea6196122532d refs/heads/local\0report-status delete-refs ofs-delta\n
- S: 003e7d1665144a3a975c05f1f43902ddaf084e784dbe refs/heads/debug\n
- S: 003f74730d410fcb6603ace96f1dc55ea6196122532d refs/heads/master\n
- S: 003f74730d410fcb6603ace96f1dc55ea6196122532d refs/heads/team\n
- S: 0000
-
- C: 003e7d1665144a3a975c05f1f43902ddaf084e784dbe 74730d410fcb6603ace96f1dc55ea6196122532d refs/heads/debug\n
- C: 003e74730d410fcb6603ace96f1dc55ea6196122532d 5a3f6be755bbb7deae50065988cbfa1ffa9ab68a refs/heads/master\n
- C: 0000
- C: [PACKDATA]
-
- S: 000eunpack ok\n
- S: 0018ok refs/heads/debug\n
- S: 002ang refs/heads/master non-fast-forward\n
-----
-*/
diff --git a/plumbing/format/packp/ulreq/decoder.go b/plumbing/format/packp/ulreq/decoder.go
deleted file mode 100644
index 9083e04..0000000
--- a/plumbing/format/packp/ulreq/decoder.go
+++ /dev/null
@@ -1,287 +0,0 @@
-package ulreq
-
-import (
- "bytes"
- "encoding/hex"
- "fmt"
- "io"
- "strconv"
- "time"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-)
-
-const (
- hashSize = 40
-)
-
-var (
- eol = []byte("\n")
- sp = []byte(" ")
- want = []byte("want ")
- shallow = []byte("shallow ")
- deepen = []byte("deepen")
- deepenCommits = []byte("deepen ")
- deepenSince = []byte("deepen-since ")
- deepenReference = []byte("deepen-not ")
-)
-
-// A Decoder reads and decodes UlReq values from an input stream.
-type Decoder struct {
- s *pktline.Scanner // a pkt-line scanner from the input stream
- line []byte // current pkt-line contents, use the nextLine method to advance
- nLine int // current pkt-line number for debugging, begins at 1
- err error // sticky error, use the error method to fill this out
- data *UlReq // parsed data is stored here
-}
-
-// NewDecoder returns a new decoder that reads from r.
-//
-// Will not read more data from r than necessary.
-func NewDecoder(r io.Reader) *Decoder {
- return &Decoder{
- s: pktline.NewScanner(r),
- }
-}
-
-// Decode reads the next upload-request from its input and
-// stores it in the value pointed to by v.
-func (d *Decoder) Decode(v *UlReq) error {
- d.data = v
-
- for state := decodeFirstWant; state != nil; {
- state = state(d)
- }
-
- return d.err
-}
-
-type decoderStateFn func(*Decoder) decoderStateFn
-
-// error fills out the decoder's sticky error.
-func (d *Decoder) error(format string, a ...interface{}) {
- d.err = fmt.Errorf("pkt-line %d: %s", d.nLine,
- fmt.Sprintf(format, a...))
-}
-
-// nextLine reads a new pkt-line from the scanner, makes its payload available as
-// d.line and increments d.nLine. A successful invocation returns true,
-// otherwise, false is returned and the sticky error is filled out
-// accordingly. Trims eols at the end of the payloads.
-func (d *Decoder) nextLine() bool {
- d.nLine++
-
- if !d.s.Scan() {
- if d.err = d.s.Err(); d.err != nil {
- return false
- }
-
- d.error("EOF")
- return false
- }
-
- d.line = d.s.Bytes()
- d.line = bytes.TrimSuffix(d.line, eol)
-
- return true
-}
-
-// Expected format: want <hash>[ capabilities]
-func decodeFirstWant(d *Decoder) decoderStateFn {
- if ok := d.nextLine(); !ok {
- return nil
- }
-
- if !bytes.HasPrefix(d.line, want) {
- d.error("missing 'want ' prefix")
- return nil
- }
- d.line = bytes.TrimPrefix(d.line, want)
-
- hash, ok := d.readHash()
- if !ok {
- return nil
- }
- d.data.Wants = append(d.data.Wants, hash)
-
- return decodeCaps
-}
-
-func (d *Decoder) readHash() (plumbing.Hash, bool) {
- if len(d.line) < hashSize {
- d.err = fmt.Errorf("malformed hash: %v", d.line)
- return plumbing.ZeroHash, false
- }
-
- var hash plumbing.Hash
- if _, err := hex.Decode(hash[:], d.line[:hashSize]); err != nil {
- d.error("invalid hash text: %s", err)
- return plumbing.ZeroHash, false
- }
- d.line = d.line[hashSize:]
-
- return hash, true
-}
-
-// Expected format: sp cap1 sp cap2 sp cap3...
-func decodeCaps(d *Decoder) decoderStateFn {
- if len(d.line) == 0 {
- return decodeOtherWants
- }
-
- d.line = bytes.TrimPrefix(d.line, sp)
-
- for _, c := range bytes.Split(d.line, sp) {
- name, values := readCapability(c)
- d.data.Capabilities.Add(name, values...)
- }
-
- return decodeOtherWants
-}
-
-// Capabilities are a single string or a name=value pair.
-// Even though we are only going to read at most one value, we return
-// a slice of values, as that is what Capability.Add receives.
-func readCapability(data []byte) (name string, values []string) {
- pair := bytes.SplitN(data, []byte{'='}, 2)
- if len(pair) == 2 {
- values = append(values, string(pair[1]))
- }
-
- return string(pair[0]), values
-}
-
-// Expected format: want <hash>
-func decodeOtherWants(d *Decoder) decoderStateFn {
- if ok := d.nextLine(); !ok {
- return nil
- }
-
- if bytes.HasPrefix(d.line, shallow) {
- return decodeShallow
- }
-
- if bytes.HasPrefix(d.line, deepen) {
- return decodeDeepen
- }
-
- if len(d.line) == 0 {
- return nil
- }
-
- if !bytes.HasPrefix(d.line, want) {
- d.error("unexpected payload while expecting a want: %q", d.line)
- return nil
- }
- d.line = bytes.TrimPrefix(d.line, want)
-
- hash, ok := d.readHash()
- if !ok {
- return nil
- }
- d.data.Wants = append(d.data.Wants, hash)
-
- return decodeOtherWants
-}
-
-// Expected format: shallow <hash>
-func decodeShallow(d *Decoder) decoderStateFn {
- if bytes.HasPrefix(d.line, deepen) {
- return decodeDeepen
- }
-
- if len(d.line) == 0 {
- return nil
- }
-
- if !bytes.HasPrefix(d.line, shallow) {
- d.error("unexpected payload while expecting a shallow: %q", d.line)
- return nil
- }
- d.line = bytes.TrimPrefix(d.line, shallow)
-
- hash, ok := d.readHash()
- if !ok {
- return nil
- }
- d.data.Shallows = append(d.data.Shallows, hash)
-
- if ok := d.nextLine(); !ok {
- return nil
- }
-
- return decodeShallow
-}
-
-// Expected format: deepen <n> / deepen-since <timestamp> / deepen-not <ref>
-func decodeDeepen(d *Decoder) decoderStateFn {
- if bytes.HasPrefix(d.line, deepenCommits) {
- return decodeDeepenCommits
- }
-
- if bytes.HasPrefix(d.line, deepenSince) {
- return decodeDeepenSince
- }
-
- if bytes.HasPrefix(d.line, deepenReference) {
- return decodeDeepenReference
- }
-
- if len(d.line) == 0 {
- return nil
- }
-
- d.error("unexpected deepen specification: %q", d.line)
- return nil
-}
-
-func decodeDeepenCommits(d *Decoder) decoderStateFn {
- d.line = bytes.TrimPrefix(d.line, deepenCommits)
-
- var n int
- if n, d.err = strconv.Atoi(string(d.line)); d.err != nil {
- return nil
- }
- if n < 0 {
- d.err = fmt.Errorf("negative depth")
- return nil
- }
- d.data.Depth = DepthCommits(n)
-
- return decodeFlush
-}
-
-func decodeDeepenSince(d *Decoder) decoderStateFn {
- d.line = bytes.TrimPrefix(d.line, deepenSince)
-
- var secs int64
- secs, d.err = strconv.ParseInt(string(d.line), 10, 64)
- if d.err != nil {
- return nil
- }
- t := time.Unix(secs, 0).UTC()
- d.data.Depth = DepthSince(t)
-
- return decodeFlush
-}
-
-func decodeDeepenReference(d *Decoder) decoderStateFn {
- d.line = bytes.TrimPrefix(d.line, deepenReference)
-
- d.data.Depth = DepthReference(string(d.line))
-
- return decodeFlush
-}
-
-func decodeFlush(d *Decoder) decoderStateFn {
- if ok := d.nextLine(); !ok {
- return nil
- }
-
- if len(d.line) != 0 {
- d.err = fmt.Errorf("unexpected payload while expecting a flush-pkt: %q", d.line)
- }
-
- return nil
-}
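-
-// decodeUploadRequest is an illustrative sketch (not part of the
-// original file) of how a caller drives the Decoder.
-func decodeUploadRequest(r io.Reader) (*UlReq, error) {
-	ur := New()
-	if err := NewDecoder(r).Decode(ur); err != nil {
-		return nil, err
-	}
-	return ur, nil
-}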
diff --git a/plumbing/format/packp/ulreq/decoder_test.go b/plumbing/format/packp/ulreq/decoder_test.go
deleted file mode 100644
index 01e4f90..0000000
--- a/plumbing/format/packp/ulreq/decoder_test.go
+++ /dev/null
@@ -1,541 +0,0 @@
-package ulreq
-
-import (
- "bytes"
- "io"
- "sort"
- "time"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
-type SuiteDecoder struct{}
-
-var _ = Suite(&SuiteDecoder{})
-
-func (s *SuiteDecoder) TestEmpty(c *C) {
- ur := New()
- var buf bytes.Buffer
- d := NewDecoder(&buf)
-
- err := d.Decode(ur)
- c.Assert(err, ErrorMatches, "pkt-line 1: EOF")
-}
-
-func (s *SuiteDecoder) TestNoWant(c *C) {
- payloads := []string{
- "foobar",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*missing 'want '.*")
-}
-
-func toPktLines(c *C, payloads []string) io.Reader {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil)
-
- return &buf
-}
-
-func testDecoderErrorMatches(c *C, input io.Reader, pattern string) {
- ur := New()
- d := NewDecoder(input)
-
- err := d.Decode(ur)
- c.Assert(err, ErrorMatches, pattern)
-}
-
-func (s *SuiteDecoder) TestInvalidFirstHash(c *C) {
- payloads := []string{
- "want 6ecf0ef2c2dffb796alberto2219af86ec6584e5\n",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid hash.*")
-}
-
-func (s *SuiteDecoder) TestWantOK(c *C) {
- payloads := []string{
- "want 1111111111111111111111111111111111111111",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- c.Assert(ur.Wants, DeepEquals, []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- })
-}
-
-func testDecodeOK(c *C, payloads []string) *UlReq {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil)
-
- ur := New()
- d := NewDecoder(&buf)
-
- err = d.Decode(ur)
- c.Assert(err, IsNil)
-
- return ur
-}
-
-func (s *SuiteDecoder) TestWantWithCapabilities(c *C) {
- payloads := []string{
- "want 1111111111111111111111111111111111111111 ofs-delta multi_ack",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
- c.Assert(ur.Wants, DeepEquals, []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111")})
-
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
-}
-
-func (s *SuiteDecoder) TestManyWantsNoCapabilities(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333",
- "want 4444444444444444444444444444444444444444",
- "want 1111111111111111111111111111111111111111",
- "want 2222222222222222222222222222222222222222",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expected := []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- plumbing.NewHash("4444444444444444444444444444444444444444"),
- }
-
- sort.Sort(byHash(ur.Wants))
- sort.Sort(byHash(expected))
- c.Assert(ur.Wants, DeepEquals, expected)
-}
-
-type byHash []plumbing.Hash
-
-func (a byHash) Len() int { return len(a) }
-func (a byHash) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-func (a byHash) Less(i, j int) bool {
- ii := [20]byte(a[i])
- jj := [20]byte(a[j])
- return bytes.Compare(ii[:], jj[:]) < 0
-}
-
-func (s *SuiteDecoder) TestManyWantsBadWant(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333",
- "want 4444444444444444444444444444444444444444",
- "foo",
- "want 2222222222222222222222222222222222222222",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
-}
-
-func (s *SuiteDecoder) TestManyWantsInvalidHash(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333",
- "want 4444444444444444444444444444444444444444",
- "want 1234567890abcdef",
- "want 2222222222222222222222222222222222222222",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed hash.*")
-}
-
-func (s *SuiteDecoder) TestManyWantsWithCapabilities(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "want 4444444444444444444444444444444444444444",
- "want 1111111111111111111111111111111111111111",
- "want 2222222222222222222222222222222222222222",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expected := []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- plumbing.NewHash("4444444444444444444444444444444444444444"),
- }
-
- sort.Sort(byHash(ur.Wants))
- sort.Sort(byHash(expected))
- c.Assert(ur.Wants, DeepEquals, expected)
-
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
-}
-
-func (s *SuiteDecoder) TestSingleShallowSingleWant(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expectedWants := []plumbing.Hash{
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- }
-
- expectedShallows := []plumbing.Hash{
- plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
- }
-
- c.Assert(ur.Wants, DeepEquals, expectedWants)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
-
- c.Assert(ur.Shallows, DeepEquals, expectedShallows)
-}
-
-func (s *SuiteDecoder) TestSingleShallowManyWants(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "want 4444444444444444444444444444444444444444",
- "want 1111111111111111111111111111111111111111",
- "want 2222222222222222222222222222222222222222",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expectedWants := []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- plumbing.NewHash("4444444444444444444444444444444444444444"),
- }
- sort.Sort(byHash(expectedWants))
-
- expectedShallows := []plumbing.Hash{
- plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
- }
-
- sort.Sort(byHash(ur.Wants))
- c.Assert(ur.Wants, DeepEquals, expectedWants)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
-
- c.Assert(ur.Shallows, DeepEquals, expectedShallows)
-}
-
-func (s *SuiteDecoder) TestManyShallowSingleWant(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
- "shallow bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
- "shallow cccccccccccccccccccccccccccccccccccccccc",
- "shallow dddddddddddddddddddddddddddddddddddddddd",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expectedWants := []plumbing.Hash{
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- }
-
- expectedShallows := []plumbing.Hash{
- plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
- plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"),
- plumbing.NewHash("cccccccccccccccccccccccccccccccccccccccc"),
- plumbing.NewHash("dddddddddddddddddddddddddddddddddddddddd"),
- }
- sort.Sort(byHash(expectedShallows))
-
- c.Assert(ur.Wants, DeepEquals, expectedWants)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
-
- sort.Sort(byHash(ur.Shallows))
- c.Assert(ur.Shallows, DeepEquals, expectedShallows)
-}
-
-func (s *SuiteDecoder) TestManyShallowManyWants(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "want 4444444444444444444444444444444444444444",
- "want 1111111111111111111111111111111111111111",
- "want 2222222222222222222222222222222222222222",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
- "shallow bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
- "shallow cccccccccccccccccccccccccccccccccccccccc",
- "shallow dddddddddddddddddddddddddddddddddddddddd",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expectedWants := []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- plumbing.NewHash("4444444444444444444444444444444444444444"),
- }
- sort.Sort(byHash(expectedWants))
-
- expectedShallows := []plumbing.Hash{
- plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
- plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"),
- plumbing.NewHash("cccccccccccccccccccccccccccccccccccccccc"),
- plumbing.NewHash("dddddddddddddddddddddddddddddddddddddddd"),
- }
- sort.Sort(byHash(expectedShallows))
-
- sort.Sort(byHash(ur.Wants))
- c.Assert(ur.Wants, DeepEquals, expectedWants)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
-
- sort.Sort(byHash(ur.Shallows))
- c.Assert(ur.Shallows, DeepEquals, expectedShallows)
-}
-
-func (s *SuiteDecoder) TestMalformedShallow(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "shalow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
-}
-
-func (s *SuiteDecoder) TestMalformedShallowHash(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed hash.*")
-}
-
-func (s *SuiteDecoder) TestMalformedShallowManyShallows(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
- "shalow bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
- "shallow cccccccccccccccccccccccccccccccccccccccc",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
-}
-
-func (s *SuiteDecoder) TestMalformedDeepenSpec(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "deepen-foo 34",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected deepen.*")
-}
-
-func (s *SuiteDecoder) TestMalformedDeepenSingleWant(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "depth 32",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
-}
-
-func (s *SuiteDecoder) TestMalformedDeepenMultiWant(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "want 2222222222222222222222222222222222222222",
- "depth 32",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
-}
-
-func (s *SuiteDecoder) TestMalformedDeepenWithSingleShallow(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "shallow 2222222222222222222222222222222222222222",
- "depth 32",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
-}
-
-func (s *SuiteDecoder) TestMalformedDeepenWithMultiShallow(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "shallow 2222222222222222222222222222222222222222",
- "shallow 5555555555555555555555555555555555555555",
- "depth 32",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
-}
-
-func (s *SuiteDecoder) TestDeepenCommits(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "deepen 1234",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- c.Assert(ur.Depth, FitsTypeOf, DepthCommits(0))
- commits, ok := ur.Depth.(DepthCommits)
- c.Assert(ok, Equals, true)
- c.Assert(int(commits), Equals, 1234)
-}
-
-func (s *SuiteDecoder) TestDeepenCommitsInfiniteExplicit(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "deepen 0",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- c.Assert(ur.Depth, FitsTypeOf, DepthCommits(0))
- commits, ok := ur.Depth.(DepthCommits)
- c.Assert(ok, Equals, true)
- c.Assert(int(commits), Equals, 0)
-}
-
-func (s *SuiteDecoder) TestDeepenCommitsInfiniteImplicit(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- c.Assert(ur.Depth, FitsTypeOf, DepthCommits(0))
- commits, ok := ur.Depth.(DepthCommits)
- c.Assert(ok, Equals, true)
- c.Assert(int(commits), Equals, 0)
-}
-
-func (s *SuiteDecoder) TestMalformedDeepenCommits(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "deepen -32",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*negative depth.*")
-}
-
-func (s *SuiteDecoder) TestDeepenCommitsEmpty(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "deepen ",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid syntax.*")
-}
-
-func (s *SuiteDecoder) TestDeepenSince(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "deepen-since 1420167845", // 2015-01-02T03:04:05+00:00
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expected := time.Date(2015, time.January, 2, 3, 4, 5, 0, time.UTC)
-
- c.Assert(ur.Depth, FitsTypeOf, DepthSince(time.Now()))
- since, ok := ur.Depth.(DepthSince)
- c.Assert(ok, Equals, true)
- c.Assert(time.Time(since).Equal(expected), Equals, true,
- Commentf("obtained=%s\nexpected=%s", time.Time(since), expected))
-}
-
-func (s *SuiteDecoder) TestDeepenReference(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "deepen-not refs/heads/master",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expected := "refs/heads/master"
-
- c.Assert(ur.Depth, FitsTypeOf, DepthReference(""))
- reference, ok := ur.Depth.(DepthReference)
- c.Assert(ok, Equals, true)
- c.Assert(string(reference), Equals, expected)
-}
-
-func (s *SuiteDecoder) TestAll(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "want 4444444444444444444444444444444444444444",
- "want 1111111111111111111111111111111111111111",
- "want 2222222222222222222222222222222222222222",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
- "shallow bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
- "shallow cccccccccccccccccccccccccccccccccccccccc",
- "shallow dddddddddddddddddddddddddddddddddddddddd",
- "deepen 1234",
- pktline.FlushString,
- }
- ur := testDecodeOK(c, payloads)
-
- expectedWants := []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- plumbing.NewHash("3333333333333333333333333333333333333333"),
- plumbing.NewHash("4444444444444444444444444444444444444444"),
- }
- sort.Sort(byHash(expectedWants))
- sort.Sort(byHash(ur.Wants))
- c.Assert(ur.Wants, DeepEquals, expectedWants)
-
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
-
- expectedShallows := []plumbing.Hash{
- plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
- plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"),
- plumbing.NewHash("cccccccccccccccccccccccccccccccccccccccc"),
- plumbing.NewHash("dddddddddddddddddddddddddddddddddddddddd"),
- }
- sort.Sort(byHash(expectedShallows))
- sort.Sort(byHash(ur.Shallows))
- c.Assert(ur.Shallows, DeepEquals, expectedShallows)
-
- c.Assert(ur.Depth, FitsTypeOf, DepthCommits(0))
- commits, ok := ur.Depth.(DepthCommits)
- c.Assert(ok, Equals, true)
- c.Assert(int(commits), Equals, 1234)
-}
-
-func (s *SuiteDecoder) TestExtraData(c *C) {
- payloads := []string{
- "want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
- "deepen 32",
- "foo",
- pktline.FlushString,
- }
- r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
-}
diff --git a/plumbing/format/packp/ulreq/encoder.go b/plumbing/format/packp/ulreq/encoder.go
deleted file mode 100644
index 1264e0e..0000000
--- a/plumbing/format/packp/ulreq/encoder.go
+++ /dev/null
@@ -1,140 +0,0 @@
-package ulreq
-
-import (
- "fmt"
- "io"
- "sort"
- "time"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-)
-
-// An Encoder writes UlReq values to an output stream.
-type Encoder struct {
- pe *pktline.Encoder // where to write the encoded data
- data *UlReq // the data to encode
- sortedWants []string
- err error // sticky error
-}
-
-// NewEncoder returns a new encoder that writes to w.
-func NewEncoder(w io.Writer) *Encoder {
- return &Encoder{
- pe: pktline.NewEncoder(w),
- }
-}
-
-// Encode writes the UlReq encoding of v to the stream.
-//
-// All the payloads will end with a newline character. Wants and
-// shallows are sorted alphabetically. A depth of 0 means no depth
-// request is sent.
-func (e *Encoder) Encode(v *UlReq) error {
- if len(v.Wants) == 0 {
- return fmt.Errorf("empty wants provided")
- }
-
- e.data = v
- e.sortedWants = sortHashes(v.Wants)
-
- for state := encodeFirstWant; state != nil; {
- state = state(e)
- }
-
- return e.err
-}
-
-type encoderStateFn func(*Encoder) encoderStateFn
-
-func sortHashes(list []plumbing.Hash) []string {
- sorted := make([]string, len(list))
- for i, hash := range list {
- sorted[i] = hash.String()
- }
- sort.Strings(sorted)
-
- return sorted
-}
-
-func encodeFirstWant(e *Encoder) encoderStateFn {
- var err error
- if e.data.Capabilities.IsEmpty() {
- err = e.pe.Encodef("want %s\n", e.sortedWants[0])
- } else {
- e.data.Capabilities.Sort()
- err = e.pe.Encodef(
- "want %s %s\n",
- e.sortedWants[0],
- e.data.Capabilities.String(),
- )
- }
- if err != nil {
- e.err = fmt.Errorf("encoding first want line: %s", err)
- return nil
- }
-
- return encodeAdditionalWants
-}
-
-func encodeAdditionalWants(e *Encoder) encoderStateFn {
- for _, w := range e.sortedWants[1:] {
- if err := e.pe.Encodef("want %s\n", w); err != nil {
- e.err = fmt.Errorf("encoding want %q: %s", w, err)
- return nil
- }
- }
-
- return encodeShallows
-}
-
-func encodeShallows(e *Encoder) encoderStateFn {
- sorted := sortHashes(e.data.Shallows)
- for _, s := range sorted {
- if err := e.pe.Encodef("shallow %s\n", s); err != nil {
- e.err = fmt.Errorf("encoding shallow %q: %s", s, err)
- return nil
- }
- }
-
- return encodeDepth
-}
-
-func encodeDepth(e *Encoder) encoderStateFn {
- switch depth := e.data.Depth.(type) {
- case DepthCommits:
- if depth != 0 {
- commits := int(depth)
- if err := e.pe.Encodef("deepen %d\n", commits); err != nil {
- e.err = fmt.Errorf("encoding depth %d: %s", depth, err)
- return nil
- }
- }
- case DepthSince:
- when := time.Time(depth).UTC()
- if err := e.pe.Encodef("deepen-since %d\n", when.Unix()); err != nil {
- e.err = fmt.Errorf("encoding depth %s: %s", when, err)
- return nil
- }
- case DepthReference:
- reference := string(depth)
- if err := e.pe.Encodef("deepen-not %s\n", reference); err != nil {
- e.err = fmt.Errorf("encoding depth %s: %s", reference, err)
- return nil
- }
- default:
- e.err = fmt.Errorf("unsupported depth type")
- return nil
- }
-
- return encodeFlush
-}
-
-func encodeFlush(e *Encoder) encoderStateFn {
- if err := e.pe.Flush(); err != nil {
- e.err = fmt.Errorf("encoding flush-pkt: %s", err)
- return nil
- }
-
- return nil
-}
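-
-// encodeUploadRequest is an illustrative sketch (not part of the
-// original file): building a minimal request and encoding it to w.
-func encodeUploadRequest(w io.Writer, want plumbing.Hash) error {
-	ur := New()
-	ur.Wants = append(ur.Wants, want)
-	return NewEncoder(w).Encode(ur)
-}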
diff --git a/plumbing/format/packp/ulreq/encoder_test.go b/plumbing/format/packp/ulreq/encoder_test.go
deleted file mode 100644
index 44c6d26..0000000
--- a/plumbing/format/packp/ulreq/encoder_test.go
+++ /dev/null
@@ -1,268 +0,0 @@
-package ulreq
-
-import (
- "bytes"
- "time"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
-type SuiteEncoder struct{}
-
-var _ = Suite(&SuiteEncoder{})
-
-// pktlines returns a byte slice with the pkt-lines for the given payloads.
-func pktlines(c *C, payloads ...string) []byte {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
-
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil, Commentf("building pktlines for %v\n", payloads))
-
- return buf.Bytes()
-}
-
-func testEncode(c *C, ur *UlReq, expectedPayloads []string) {
- var buf bytes.Buffer
- e := NewEncoder(&buf)
-
- err := e.Encode(ur)
- c.Assert(err, IsNil)
- obtained := buf.Bytes()
-
- expected := pktlines(c, expectedPayloads...)
-
- comment := Commentf("\nobtained = %s\nexpected = %s\n", string(obtained), string(expected))
-
- c.Assert(obtained, DeepEquals, expected, comment)
-}
-
-func testEncodeError(c *C, ur *UlReq, expectedErrorRegEx string) {
- var buf bytes.Buffer
- e := NewEncoder(&buf)
-
- err := e.Encode(ur)
- c.Assert(err, ErrorMatches, expectedErrorRegEx)
-}
-
-func (s *SuiteEncoder) TestZeroValue(c *C) {
- ur := New()
- expectedErrorRegEx := ".*empty wants.*"
-
- testEncodeError(c, ur, expectedErrorRegEx)
-}
-
-func (s *SuiteEncoder) TestOneWant(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
-
- expected := []string{
- "want 1111111111111111111111111111111111111111\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestOneWantWithCapabilities(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
- ur.Capabilities.Add("multi_ack")
- ur.Capabilities.Add("thin-pack")
- ur.Capabilities.Add("side-band")
- ur.Capabilities.Add("ofs-delta")
-
- expected := []string{
- "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band sysref=HEAD:/refs/heads/master thin-pack\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestWants(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("4444444444444444444444444444444444444444"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("3333333333333333333333333333333333333333"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("2222222222222222222222222222222222222222"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("5555555555555555555555555555555555555555"))
-
- expected := []string{
- "want 1111111111111111111111111111111111111111\n",
- "want 2222222222222222222222222222222222222222\n",
- "want 3333333333333333333333333333333333333333\n",
- "want 4444444444444444444444444444444444444444\n",
- "want 5555555555555555555555555555555555555555\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestWantsWithCapabilities(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("4444444444444444444444444444444444444444"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("3333333333333333333333333333333333333333"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("2222222222222222222222222222222222222222"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("5555555555555555555555555555555555555555"))
-
- ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
- ur.Capabilities.Add("multi_ack")
- ur.Capabilities.Add("thin-pack")
- ur.Capabilities.Add("side-band")
- ur.Capabilities.Add("ofs-delta")
-
- expected := []string{
- "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band sysref=HEAD:/refs/heads/master thin-pack\n",
- "want 2222222222222222222222222222222222222222\n",
- "want 3333333333333333333333333333333333333333\n",
- "want 4444444444444444444444444444444444444444\n",
- "want 5555555555555555555555555555555555555555\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestShallow(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Capabilities.Add("multi_ack")
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))
-
- expected := []string{
- "want 1111111111111111111111111111111111111111 multi_ack\n",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestManyShallows(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Capabilities.Add("multi_ack")
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"))
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("dddddddddddddddddddddddddddddddddddddddd"))
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("cccccccccccccccccccccccccccccccccccccccc"))
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))
-
- expected := []string{
- "want 1111111111111111111111111111111111111111 multi_ack\n",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n",
- "shallow bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n",
- "shallow cccccccccccccccccccccccccccccccccccccccc\n",
- "shallow dddddddddddddddddddddddddddddddddddddddd\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestDepthCommits(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Depth = DepthCommits(1234)
-
- expected := []string{
- "want 1111111111111111111111111111111111111111\n",
- "deepen 1234\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestDepthSinceUTC(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- since := time.Date(2015, time.January, 2, 3, 4, 5, 0, time.UTC)
- ur.Depth = DepthSince(since)
-
- expected := []string{
- "want 1111111111111111111111111111111111111111\n",
- "deepen-since 1420167845\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
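A quick sanity check of the 1420167845 value used above and in the examples below: 2015-01-02 00:00:00 UTC is 1420156800 Unix seconds, and 03:04:05 adds 3*3600 + 4*60 + 5 = 11045 more, giving 1420167845. A one-liner to confirm it, using only the standard library:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        since := time.Date(2015, time.January, 2, 3, 4, 5, 0, time.UTC)
        fmt.Println(since.Unix()) // 1420167845, the "deepen-since" payload
    }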
-
-func (s *SuiteEncoder) TestDepthSinceNonUTC(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- berlin, err := time.LoadLocation("Europe/Berlin")
- c.Assert(err, IsNil)
- since := time.Date(2015, time.January, 2, 3, 4, 5, 0, berlin)
-	// since is 2015-01-02 03:04:05 +0100 (Europe/Berlin), i.e.
-	// 2015-01-02 02:04:05 UTC, which is 1420164245 Unix seconds.
- ur.Depth = DepthSince(since)
-
- expected := []string{
- "want 1111111111111111111111111111111111111111\n",
- "deepen-since 1420164245\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestDepthReference(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Depth = DepthReference("refs/heads/feature-foo")
-
- expected := []string{
- "want 1111111111111111111111111111111111111111\n",
- "deepen-not refs/heads/feature-foo\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
-
-func (s *SuiteEncoder) TestAll(c *C) {
- ur := New()
- ur.Wants = append(ur.Wants, plumbing.NewHash("4444444444444444444444444444444444444444"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("3333333333333333333333333333333333333333"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("2222222222222222222222222222222222222222"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("5555555555555555555555555555555555555555"))
-
- ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
- ur.Capabilities.Add("multi_ack")
- ur.Capabilities.Add("thin-pack")
- ur.Capabilities.Add("side-band")
- ur.Capabilities.Add("ofs-delta")
-
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"))
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("dddddddddddddddddddddddddddddddddddddddd"))
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("cccccccccccccccccccccccccccccccccccccccc"))
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))
-
- since := time.Date(2015, time.January, 2, 3, 4, 5, 0, time.UTC)
- ur.Depth = DepthSince(since)
-
- expected := []string{
- "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band sysref=HEAD:/refs/heads/master thin-pack\n",
- "want 2222222222222222222222222222222222222222\n",
- "want 3333333333333333333333333333333333333333\n",
- "want 4444444444444444444444444444444444444444\n",
- "want 5555555555555555555555555555555555555555\n",
- "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n",
- "shallow bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n",
- "shallow cccccccccccccccccccccccccccccccccccccccc\n",
- "shallow dddddddddddddddddddddddddddddddddddddddd\n",
- "deepen-since 1420167845\n",
- pktline.FlushString,
- }
-
- testEncode(c, ur, expected)
-}
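TestWants, TestWantsWithCapabilities and TestManyShallows above pin down a deliberate property: the encoder sorts wants and shallows, so the emitted pkt-lines are deterministic no matter the order in which hashes were appended. The sortHashes helper itself is not part of this hunk; a sketch of what the expected outputs imply (ordering by the hexadecimal form) might look like the following, assuming go-git's plumbing.Hash with its String method and an imported "sort" package:

    // sortedCopy is a hypothetical stand-in for sortHashes, which is not
    // shown in this diff; the tests imply hashes are ordered by their
    // hexadecimal string form.
    func sortedCopy(hashes []plumbing.Hash) []plumbing.Hash {
        sorted := make([]plumbing.Hash, len(hashes))
        copy(sorted, hashes)
        sort.Sort(byHex(sorted))
        return sorted
    }

    type byHex []plumbing.Hash

    func (h byHex) Len() int           { return len(h) }
    func (h byHex) Swap(i, j int)      { h[i], h[j] = h[j], h[i] }
    func (h byHex) Less(i, j int) bool { return h[i].String() < h[j].String() }

(sort.Slice would be shorter, but the sort.Interface form works on any Go version.)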
diff --git a/plumbing/format/packp/ulreq/ulreq.go b/plumbing/format/packp/ulreq/ulreq.go
deleted file mode 100644
index d2cc7c0..0000000
--- a/plumbing/format/packp/ulreq/ulreq.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Package ulreq implements encoding and decoding upload-request
-// messages from a git-upload-pack command.
-package ulreq
-
-import (
- "time"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
-)
-
-// UlReq values represent the information transmitted in an
-// upload-request message. Values of this type are not zero-value
-// safe; use the New function instead.
-type UlReq struct {
- Capabilities *packp.Capabilities
- Wants []plumbing.Hash
- Shallows []plumbing.Hash
- Depth Depth
-}
-
-// Depth values store the desired depth of the requested packfile: see
-// DepthCommits, DepthSince and DepthReference.
-type Depth interface {
- isDepth()
-}
-
-// DepthCommits values store the maximum number of requested commits in
-// the packfile. Zero means infinite. A negative value will have
-// undefined consequences.
-type DepthCommits int
-
-func (d DepthCommits) isDepth() {}
-
-// DepthSince values request only commits newer than the specified time.
-type DepthSince time.Time
-
-func (d DepthSince) isDepth() {}
-
-// DepthReference values request only commits not found in the specified reference.
-type DepthReference string
-
-func (d DepthReference) isDepth() {}
-
-// New returns a pointer to a new UlReq value, ready to be used. It has
-// no capabilities, wants or shallows and an infinite depth. Note that
-// an upload-request must have at least one wanted hash to be encoded.
-func New() *UlReq {
- return &UlReq{
- Capabilities: packp.NewCapabilities(),
- Wants: []plumbing.Hash{},
- Shallows: []plumbing.Hash{},
- Depth: DepthCommits(0),
- }
-}
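Depth is a closed sum type in the usual Go idiom: the unexported isDepth marker method means only types declared in this package can satisfy the interface, so a type switch over DepthCommits, DepthSince and DepthReference covers every case the encoder and decoder can meet. A hedged sketch of a consumer, mirroring the switch in the deleted encodeDepth (describeDepth is an illustrative name, not package API; it assumes the "fmt" and "time" imports):

    // describeDepth branches on the concrete Depth type, the same way
    // encodeDepth chooses between deepen, deepen-since and deepen-not.
    func describeDepth(d Depth) string {
        switch depth := d.(type) {
        case DepthCommits:
            return fmt.Sprintf("at most %d commits (0 = infinite)", int(depth))
        case DepthSince:
            return fmt.Sprintf("commits newer than %s", time.Time(depth).UTC())
        case DepthReference:
            return fmt.Sprintf("commits not in %s", string(depth))
        default:
            return "unsupported depth type"
        }
    }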
diff --git a/plumbing/format/packp/ulreq/ulreq_test.go b/plumbing/format/packp/ulreq/ulreq_test.go
deleted file mode 100644
index 06963ff..0000000
--- a/plumbing/format/packp/ulreq/ulreq_test.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package ulreq
-
-import (
- "fmt"
- "os"
- "strings"
- "testing"
- "time"
-
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-
- . "gopkg.in/check.v1"
-)
-
-func Test(t *testing.T) { TestingT(t) }
-
-func ExampleEncoder_Encode() {
-	// Create an empty UlReq, then fill it with the contents you want...
- ur := New()
-
- // Add a couple of wants
- ur.Wants = append(ur.Wants, plumbing.NewHash("3333333333333333333333333333333333333333"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Wants = append(ur.Wants, plumbing.NewHash("2222222222222222222222222222222222222222"))
-
-	// And some capabilities you would like the server to use
- ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
- ur.Capabilities.Add("ofs-delta")
-
- // Add a couple of shallows
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"))
- ur.Shallows = append(ur.Shallows, plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))
-
-	// And restrict the server's answer to commits newer than "2015-01-02 03:04:05 UTC"
- since := time.Date(2015, time.January, 2, 3, 4, 5, 0, time.UTC)
- ur.Depth = DepthSince(since)
-
-	// Create a new Encoder writing to stdout...
- e := NewEncoder(os.Stdout)
- // ...and encode the upload-request to it.
- _ = e.Encode(ur) // ignoring errors for brevity
- // Output:
- // 005bwant 1111111111111111111111111111111111111111 ofs-delta sysref=HEAD:/refs/heads/master
- // 0032want 2222222222222222222222222222222222222222
- // 0032want 3333333333333333333333333333333333333333
- // 0035shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
- // 0035shallow bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
- // 001cdeepen-since 1420167845
- // 0000
-}
-
-func ExampleDecoder_Decode() {
-	// Here is a raw upload-request message.
- raw := "" +
- "005bwant 1111111111111111111111111111111111111111 ofs-delta sysref=HEAD:/refs/heads/master\n" +
- "0032want 2222222222222222222222222222222222222222\n" +
- "0032want 3333333333333333333333333333333333333333\n" +
- "0035shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n" +
- "0035shallow bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n" +
- "001cdeepen-since 1420167845\n" + // 2015-01-02 03:04:05 +0000 UTC
- pktline.FlushString
-
- // Use the raw message as our input.
- input := strings.NewReader(raw)
-
- // Create the Decoder reading from our input.
- d := NewDecoder(input)
-
- // Decode the input into a newly allocated UlReq value.
- ur := New()
- _ = d.Decode(ur) // error check ignored for brevity
-
- // Do something interesting with the UlReq, e.g. print its contents.
- fmt.Println("capabilities =", ur.Capabilities.String())
- fmt.Println("wants =", ur.Wants)
- fmt.Println("shallows =", ur.Shallows)
- switch depth := ur.Depth.(type) {
- case DepthCommits:
- fmt.Println("depth =", int(depth))
- case DepthSince:
- fmt.Println("depth =", time.Time(depth))
- case DepthReference:
- fmt.Println("depth =", string(depth))
- }
- // Output:
- // capabilities = ofs-delta sysref=HEAD:/refs/heads/master
- // wants = [1111111111111111111111111111111111111111 2222222222222222222222222222222222222222 3333333333333333333333333333333333333333]
- // shallows = [aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb]
- // depth = 2015-01-02 03:04:05 +0000 UTC
-}
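The hex prefixes in both examples above are the pkt-line framing: each payload is preceded by its own length plus the four prefix bytes, written as four lowercase hex digits, and "0000" is the flush-pkt that ends the message. For instance, "want 2222...\n" is 46 bytes, so its line starts with 46 + 4 = 50 = 0x32 → "0032"; the first want line carries an 87-byte payload, hence "005b" (91). A minimal framing function, using only the standard library:

    package main

    import "fmt"

    // pktLine frames a payload as a pkt-line: a 4-digit lowercase hex
    // length counting the payload plus the 4 prefix bytes themselves.
    func pktLine(payload string) string {
        return fmt.Sprintf("%04x%s", len(payload)+4, payload)
    }

    func main() {
        fmt.Print(pktLine("want 2222222222222222222222222222222222222222\n")) // "0032want ..."
        fmt.Println("0000") // flush-pkt: no payload, literal length "0000"
    }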
diff --git a/plumbing/format/packp/pktline/encoder.go b/plumbing/format/pktline/encoder.go
index 0a88a9b..0a88a9b 100644
--- a/plumbing/format/packp/pktline/encoder.go
+++ b/plumbing/format/pktline/encoder.go
diff --git a/plumbing/format/packp/pktline/encoder_test.go b/plumbing/format/pktline/encoder_test.go
index cd97593..d1258af 100644
--- a/plumbing/format/packp/pktline/encoder_test.go
+++ b/plumbing/format/pktline/encoder_test.go
@@ -6,7 +6,7 @@ import (
"strings"
"testing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
)
diff --git a/plumbing/format/packp/pktline/scanner.go b/plumbing/format/pktline/scanner.go
index 3ce2adf..3ce2adf 100644
--- a/plumbing/format/packp/pktline/scanner.go
+++ b/plumbing/format/pktline/scanner.go
diff --git a/plumbing/format/packp/pktline/scanner_test.go b/plumbing/format/pktline/scanner_test.go
index c5395cf..9f440a4 100644
--- a/plumbing/format/packp/pktline/scanner_test.go
+++ b/plumbing/format/pktline/scanner_test.go
@@ -6,7 +6,7 @@ import (
"io"
"strings"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
)