Diffstat (limited to 'plumbing')
-rw-r--r--  plumbing/format/pktline/encoder.go (renamed from plumbing/format/packp/pktline/encoder.go) |   0
-rw-r--r--  plumbing/format/pktline/encoder_test.go (renamed from plumbing/format/packp/pktline/encoder_test.go) |   2
-rw-r--r--  plumbing/format/pktline/scanner.go (renamed from plumbing/format/packp/pktline/scanner.go) |   0
-rw-r--r--  plumbing/format/pktline/scanner_test.go (renamed from plumbing/format/packp/pktline/scanner_test.go) |   2
-rw-r--r--  plumbing/protocol/packp/advrefs.go (renamed from plumbing/format/packp/advrefs/advrefs.go) |  28
-rw-r--r--  plumbing/protocol/packp/advrefs_decode.go (renamed from plumbing/format/packp/advrefs/decoder.go) |  48
-rw-r--r--  plumbing/protocol/packp/advrefs_decoder_test.go (renamed from plumbing/format/packp/advrefs/decoder_test.go) | 163
-rw-r--r--  plumbing/protocol/packp/advrefs_encoder.go (renamed from plumbing/format/packp/advrefs/encoder.go) |  31
-rw-r--r--  plumbing/protocol/packp/advrefs_encoder_test.go (renamed from plumbing/format/packp/advrefs/encoder_test.go) | 154
-rw-r--r--  plumbing/protocol/packp/advrefs_test.go (renamed from plumbing/format/packp/advrefs/advrefs_test.go) |  24
-rw-r--r--  plumbing/protocol/packp/capabilities.go (renamed from plumbing/format/packp/capabilities.go) |   0
-rw-r--r--  plumbing/protocol/packp/capabilities_test.go (renamed from plumbing/format/packp/capabilities_test.go) |   4
-rw-r--r--  plumbing/protocol/packp/common.go |  45
-rw-r--r--  plumbing/protocol/packp/common_test.go |  33
-rw-r--r--  plumbing/protocol/packp/doc.go (renamed from plumbing/format/packp/doc.go) |   0
-rw-r--r--  plumbing/protocol/packp/ulreq.go (renamed from plumbing/format/packp/ulreq/ulreq.go) |  13
-rw-r--r--  plumbing/protocol/packp/ulreq_decoder.go (renamed from plumbing/format/packp/ulreq/decoder.go) | 101
-rw-r--r--  plumbing/protocol/packp/ulreq_decoder_test.go (renamed from plumbing/format/packp/ulreq/decoder_test.go) | 151
-rw-r--r--  plumbing/protocol/packp/ulreq_encoder.go (renamed from plumbing/format/packp/ulreq/encoder.go) |  40
-rw-r--r--  plumbing/protocol/packp/ulreq_encoder_test.go (renamed from plumbing/format/packp/ulreq/encoder_test.go) |  99
-rw-r--r--  plumbing/protocol/packp/ulreq_test.go (renamed from plumbing/format/packp/ulreq/ulreq_test.go) |  21
-rw-r--r--  plumbing/transport/fetch_pack.go |  19
-rw-r--r--  plumbing/transport/fetch_pack_test.go |   4
-rw-r--r--  plumbing/transport/http/fetch_pack.go |   6
-rw-r--r--  plumbing/transport/ssh/fetch_pack.go |  13
25 files changed, 480 insertions, 521 deletions
diff --git a/plumbing/format/packp/pktline/encoder.go b/plumbing/format/pktline/encoder.go
index 0a88a9b..0a88a9b 100644
--- a/plumbing/format/packp/pktline/encoder.go
+++ b/plumbing/format/pktline/encoder.go
diff --git a/plumbing/format/packp/pktline/encoder_test.go b/plumbing/format/pktline/encoder_test.go
index cd97593..d1258af 100644
--- a/plumbing/format/packp/pktline/encoder_test.go
+++ b/plumbing/format/pktline/encoder_test.go
@@ -6,7 +6,7 @@ import (
"strings"
"testing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
)
diff --git a/plumbing/format/packp/pktline/scanner.go b/plumbing/format/pktline/scanner.go
index 3ce2adf..3ce2adf 100644
--- a/plumbing/format/packp/pktline/scanner.go
+++ b/plumbing/format/pktline/scanner.go
diff --git a/plumbing/format/packp/pktline/scanner_test.go b/plumbing/format/pktline/scanner_test.go
index c5395cf..9f440a4 100644
--- a/plumbing/format/packp/pktline/scanner_test.go
+++ b/plumbing/format/pktline/scanner_test.go
@@ -6,7 +6,7 @@ import (
"io"
"strings"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
)
diff --git a/plumbing/format/packp/advrefs/advrefs.go b/plumbing/protocol/packp/advrefs.go
index 4d7c897..c54f9d8 100644
--- a/plumbing/format/packp/advrefs/advrefs.go
+++ b/plumbing/protocol/packp/advrefs.go
@@ -1,25 +1,7 @@
-// Package advrefs implements encoding and decoding advertised-refs
-// messages from a git-upload-pack command.
-package advrefs
+package packp
import (
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
-)
-
-const (
- hashSize = 40
- head = "HEAD"
- noHead = "capabilities^{}"
-)
-
-var (
- sp = []byte(" ")
- null = []byte("\x00")
- eol = []byte("\n")
- peeled = []byte("^{}")
- shallow = []byte("shallow ")
- noHeadMark = []byte(" capabilities^{}\x00")
)
// AdvRefs values represent the information transmitted on an
@@ -40,17 +22,17 @@ var (
type AdvRefs struct {
Prefix [][]byte // payloads of the prefix
Head *plumbing.Hash
- Capabilities *packp.Capabilities
+ Capabilities *Capabilities
References map[string]plumbing.Hash
Peeled map[string]plumbing.Hash
Shallows []plumbing.Hash
}
-// New returns a pointer to a new AdvRefs value, ready to be used.
-func New() *AdvRefs {
+// NewAdvRefs returns a pointer to a new AdvRefs value, ready to be used.
+func NewAdvRefs() *AdvRefs {
return &AdvRefs{
Prefix: [][]byte{},
- Capabilities: packp.NewCapabilities(),
+ Capabilities: NewCapabilities(),
References: make(map[string]plumbing.Hash),
Peeled: make(map[string]plumbing.Hash),
Shallows: []plumbing.Hash{},
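
The hunks above rename advrefs.New to NewAdvRefs and replace packp.Capabilities with the now package-local Capabilities type. A minimal sketch of caller code under the new API (not part of this commit; it assumes the new import path gopkg.in/src-d/go-git.v4/plumbing/protocol/packp used in advrefs_test.go below):

    package main

    import (
        "fmt"

        "gopkg.in/src-d/go-git.v4/plumbing"
        "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
    )

    func main() {
        // NewAdvRefs (formerly advrefs.New) returns a value with initialized
        // Capabilities, References, Peeled and Shallows fields.
        ar := packp.NewAdvRefs()

        head := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
        ar.Head = &head
        ar.Capabilities.Add("ofs-delta")
        ar.References["refs/heads/master"] = head

        fmt.Println(ar.Capabilities.Supports("ofs-delta")) // true
    }
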
diff --git a/plumbing/format/packp/advrefs/decoder.go b/plumbing/protocol/packp/advrefs_decode.go
index c50eeef..df824a9 100644
--- a/plumbing/format/packp/advrefs/decoder.go
+++ b/plumbing/protocol/packp/advrefs_decode.go
@@ -1,4 +1,4 @@
-package advrefs
+package packp
import (
"bytes"
@@ -8,11 +8,11 @@ import (
"io"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
)
-// A Decoder reads and decodes AdvRef values from an input stream.
-type Decoder struct {
+// An AdvRefsDecoder reads and decodes AdvRefs values from an input stream.
+type AdvRefsDecoder struct {
s *pktline.Scanner // a pkt-line scanner from the input stream
line []byte // current pkt-line contents, use parser.nextLine() to make it advance
nLine int // current pkt-line number for debugging, begins at 1
@@ -24,18 +24,18 @@ type Decoder struct {
// ErrEmpty is returned by Decode when there was no advertised-message at all
var ErrEmpty = errors.New("empty advertised-ref message")
-// NewDecoder returns a new decoder that reads from r.
+// NewAdvRefsDecoder returns a new decoder that reads from r.
//
// Will not read more data from r than necessary.
-func NewDecoder(r io.Reader) *Decoder {
- return &Decoder{
+func NewAdvRefsDecoder(r io.Reader) *AdvRefsDecoder {
+ return &AdvRefsDecoder{
s: pktline.NewScanner(r),
}
}
// Decode reads the next advertised-refs message from its input and
// stores it in the value pointed to by v.
-func (d *Decoder) Decode(v *AdvRefs) error {
+func (d *AdvRefsDecoder) Decode(v *AdvRefs) error {
d.data = v
for state := decodePrefix; state != nil; {
@@ -45,10 +45,10 @@ func (d *Decoder) Decode(v *AdvRefs) error {
return d.err
}
-type decoderStateFn func(*Decoder) decoderStateFn
+type decoderStateFn func(*AdvRefsDecoder) decoderStateFn
// fills out the parser sticky error
-func (d *Decoder) error(format string, a ...interface{}) {
+func (d *AdvRefsDecoder) error(format string, a ...interface{}) {
d.err = fmt.Errorf("pkt-line %d: %s", d.nLine,
fmt.Sprintf(format, a...))
}
@@ -57,7 +57,7 @@ func (d *Decoder) error(format string, a ...interface{}) {
// p.line and increments p.nLine. A successful invocation returns true,
// otherwise, false is returned and the sticky error is filled out
// accordingly. Trims eols at the end of the payloads.
-func (d *Decoder) nextLine() bool {
+func (d *AdvRefsDecoder) nextLine() bool {
d.nLine++
if !d.s.Scan() {
@@ -81,7 +81,7 @@ func (d *Decoder) nextLine() bool {
}
// The HTTP smart prefix is often followed by a flush-pkt.
-func decodePrefix(d *Decoder) decoderStateFn {
+func decodePrefix(d *AdvRefsDecoder) decoderStateFn {
if ok := d.nextLine(); !ok {
return nil
}
@@ -122,7 +122,7 @@ func isFlush(payload []byte) bool {
// If the first hash is zero, then a no-refs is coming. Otherwise, a
// list-of-refs is coming, and the hash will be followed by the first
// advertised ref.
-func decodeFirstHash(p *Decoder) decoderStateFn {
+func decodeFirstHash(p *AdvRefsDecoder) decoderStateFn {
// If the repository is empty, we receive a flush here (HTTP).
if isFlush(p.line) {
p.err = ErrEmpty
@@ -149,7 +149,7 @@ func decodeFirstHash(p *Decoder) decoderStateFn {
}
// Skips SP "capabilities^{}" NUL
-func decodeSkipNoRefs(p *Decoder) decoderStateFn {
+func decodeSkipNoRefs(p *AdvRefsDecoder) decoderStateFn {
if len(p.line) < len(noHeadMark) {
p.error("too short zero-id ref")
return nil
@@ -166,7 +166,7 @@ func decodeSkipNoRefs(p *Decoder) decoderStateFn {
}
// decode the refname, expects SP refname NULL
-func decodeFirstRef(l *Decoder) decoderStateFn {
+func decodeFirstRef(l *AdvRefsDecoder) decoderStateFn {
if len(l.line) < 3 {
l.error("line too short after hash")
return nil
@@ -195,7 +195,7 @@ func decodeFirstRef(l *Decoder) decoderStateFn {
return decodeCaps
}
-func decodeCaps(p *Decoder) decoderStateFn {
+func decodeCaps(p *AdvRefsDecoder) decoderStateFn {
if len(p.line) == 0 {
return decodeOtherRefs
}
@@ -208,21 +208,9 @@ func decodeCaps(p *Decoder) decoderStateFn {
return decodeOtherRefs
}
-// Capabilities are a single string or a name=value.
-// Even though we are only going to read at moust 1 value, we return
-// a slice of values, as Capability.Add receives that.
-func readCapability(data []byte) (name string, values []string) {
- pair := bytes.SplitN(data, []byte{'='}, 2)
- if len(pair) == 2 {
- values = append(values, string(pair[1]))
- }
-
- return string(pair[0]), values
-}
-
// The refs are either tips (obj-id SP refname) or a peeled (obj-id SP refname^{}).
// If there are no refs, then there might be a shallow or flush-pkt.
-func decodeOtherRefs(p *Decoder) decoderStateFn {
+func decodeOtherRefs(p *AdvRefsDecoder) decoderStateFn {
if ok := p.nextLine(); !ok {
return nil
}
@@ -265,7 +253,7 @@ func readRef(data []byte) (string, plumbing.Hash, error) {
}
// Keeps reading shallows until a flush-pkt is found
-func decodeShallow(p *Decoder) decoderStateFn {
+func decodeShallow(p *AdvRefsDecoder) decoderStateFn {
if !bytes.HasPrefix(p.line, shallow) {
p.error("malformed shallow prefix, found %q... instead", p.line[:len(shallow)])
return nil
diff --git a/plumbing/format/packp/advrefs/decoder_test.go b/plumbing/protocol/packp/advrefs_decoder_test.go
index bacf79a..ee72d56 100644
--- a/plumbing/format/packp/advrefs/decoder_test.go
+++ b/plumbing/protocol/packp/advrefs_decoder_test.go
@@ -1,4 +1,4 @@
-package advrefs_test
+package packp
import (
"bytes"
@@ -6,104 +6,93 @@ import (
"strings"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
)
-type SuiteDecoder struct{}
+type AdvRefsDecodeSuite struct{}
-var _ = Suite(&SuiteDecoder{})
+var _ = Suite(&AdvRefsDecodeSuite{})
-func (s *SuiteDecoder) TestEmpty(c *C) {
- ar := advrefs.New()
+func (s *AdvRefsDecodeSuite) TestEmpty(c *C) {
+ ar := NewAdvRefs()
var buf bytes.Buffer
- d := advrefs.NewDecoder(&buf)
+ d := NewAdvRefsDecoder(&buf)
err := d.Decode(ar)
- c.Assert(err, Equals, advrefs.ErrEmpty)
+ c.Assert(err, Equals, ErrEmpty)
}
-func (s *SuiteDecoder) TestEmptyFlush(c *C) {
- ar := advrefs.New()
+func (s *AdvRefsDecodeSuite) TestEmptyFlush(c *C) {
+ ar := NewAdvRefs()
var buf bytes.Buffer
e := pktline.NewEncoder(&buf)
e.Flush()
- d := advrefs.NewDecoder(&buf)
+ d := NewAdvRefsDecoder(&buf)
err := d.Decode(ar)
- c.Assert(err, Equals, advrefs.ErrEmpty)
+ c.Assert(err, Equals, ErrEmpty)
}
-func (s *SuiteDecoder) TestEmptyPrefixFlush(c *C) {
- ar := advrefs.New()
+func (s *AdvRefsDecodeSuite) TestEmptyPrefixFlush(c *C) {
+ ar := NewAdvRefs()
var buf bytes.Buffer
e := pktline.NewEncoder(&buf)
e.EncodeString("# service=git-upload-pack")
e.Flush()
e.Flush()
- d := advrefs.NewDecoder(&buf)
+ d := NewAdvRefsDecoder(&buf)
err := d.Decode(ar)
- c.Assert(err, Equals, advrefs.ErrEmpty)
+ c.Assert(err, Equals, ErrEmpty)
}
-func (s *SuiteDecoder) TestShortForHash(c *C) {
+func (s *AdvRefsDecodeSuite) TestShortForHash(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short")
+ s.testDecoderErrorMatches(c, r, ".*too short")
}
-func toPktLines(c *C, payloads []string) io.Reader {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil)
-
- return &buf
-}
-
-func testDecoderErrorMatches(c *C, input io.Reader, pattern string) {
- ar := advrefs.New()
- d := advrefs.NewDecoder(input)
+func (s *AdvRefsDecodeSuite) testDecoderErrorMatches(c *C, input io.Reader, pattern string) {
+ ar := NewAdvRefs()
+ d := NewAdvRefsDecoder(input)
err := d.Decode(ar)
c.Assert(err, ErrorMatches, pattern)
}
-func (s *SuiteDecoder) TestInvalidFirstHash(c *C) {
+func (s *AdvRefsDecodeSuite) TestInvalidFirstHash(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796alberto2219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid hash.*")
+ s.testDecoderErrorMatches(c, r, ".*invalid hash.*")
}
-func (s *SuiteDecoder) TestZeroId(c *C) {
+func (s *AdvRefsDecodeSuite) TestZeroId(c *C) {
payloads := []string{
"0000000000000000000000000000000000000000 capabilities^{}\x00multi_ack thin-pack\n",
pktline.FlushString,
}
- ar := testDecodeOK(c, payloads)
+ ar := s.testDecodeOK(c, payloads)
c.Assert(ar.Head, IsNil)
}
-func testDecodeOK(c *C, payloads []string) *advrefs.AdvRefs {
+func (s *AdvRefsDecodeSuite) testDecodeOK(c *C, payloads []string) *AdvRefs {
var buf bytes.Buffer
e := pktline.NewEncoder(&buf)
err := e.EncodeString(payloads...)
c.Assert(err, IsNil)
- ar := advrefs.New()
- d := advrefs.NewDecoder(&buf)
+ ar := NewAdvRefs()
+ d := NewAdvRefsDecoder(&buf)
err = d.Decode(ar)
c.Assert(err, IsNil)
@@ -111,106 +100,106 @@ func testDecodeOK(c *C, payloads []string) *advrefs.AdvRefs {
return ar
}
-func (s *SuiteDecoder) TestMalformedZeroId(c *C) {
+func (s *AdvRefsDecodeSuite) TestMalformedZeroId(c *C) {
payloads := []string{
"0000000000000000000000000000000000000000 wrong\x00multi_ack thin-pack\n",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed zero-id.*")
+ s.testDecoderErrorMatches(c, r, ".*malformed zero-id.*")
}
-func (s *SuiteDecoder) TestShortZeroId(c *C) {
+func (s *AdvRefsDecodeSuite) TestShortZeroId(c *C) {
payloads := []string{
"0000000000000000000000000000000000000000 capabi",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short zero-id.*")
+ s.testDecoderErrorMatches(c, r, ".*too short zero-id.*")
}
-func (s *SuiteDecoder) TestHead(c *C) {
+func (s *AdvRefsDecodeSuite) TestHead(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
pktline.FlushString,
}
- ar := testDecodeOK(c, payloads)
+ ar := s.testDecodeOK(c, payloads)
c.Assert(*ar.Head, Equals,
plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
}
-func (s *SuiteDecoder) TestFirstIsNotHead(c *C) {
+func (s *AdvRefsDecodeSuite) TestFirstIsNotHead(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 refs/heads/master\x00",
pktline.FlushString,
}
- ar := testDecodeOK(c, payloads)
+ ar := s.testDecodeOK(c, payloads)
c.Assert(ar.Head, IsNil)
c.Assert(ar.References["refs/heads/master"], Equals,
plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
}
-func (s *SuiteDecoder) TestShortRef(c *C) {
+func (s *AdvRefsDecodeSuite) TestShortRef(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 H",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*too short.*")
+ s.testDecoderErrorMatches(c, r, ".*too short.*")
}
-func (s *SuiteDecoder) TestNoNULL(c *C) {
+func (s *AdvRefsDecodeSuite) TestNoNULL(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEADofs-delta multi_ack",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*NULL not found.*")
+ s.testDecoderErrorMatches(c, r, ".*NULL not found.*")
}
-func (s *SuiteDecoder) TestNoSpaceAfterHash(c *C) {
+func (s *AdvRefsDecodeSuite) TestNoSpaceAfterHash(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5-HEAD\x00",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*no space after hash.*")
+ s.testDecoderErrorMatches(c, r, ".*no space after hash.*")
}
-func (s *SuiteDecoder) TestNoCaps(c *C) {
+func (s *AdvRefsDecodeSuite) TestNoCaps(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
pktline.FlushString,
}
- ar := testDecodeOK(c, payloads)
+ ar := s.testDecodeOK(c, payloads)
c.Assert(ar.Capabilities.IsEmpty(), Equals, true)
}
-func (s *SuiteDecoder) TestCaps(c *C) {
+func (s *AdvRefsDecodeSuite) TestCaps(c *C) {
for _, test := range [...]struct {
input []string
- capabilities []packp.Capability
+ capabilities []Capability
}{
{
input: []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
pktline.FlushString,
},
- capabilities: []packp.Capability{},
+ capabilities: []Capability{},
},
{
input: []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n",
pktline.FlushString,
},
- capabilities: []packp.Capability{},
+ capabilities: []Capability{},
},
{
input: []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta",
pktline.FlushString,
},
- capabilities: []packp.Capability{
+ capabilities: []Capability{
{
Name: "ofs-delta",
Values: []string(nil),
@@ -222,7 +211,7 @@ func (s *SuiteDecoder) TestCaps(c *C) {
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack",
pktline.FlushString,
},
- capabilities: []packp.Capability{
+ capabilities: []Capability{
{Name: "ofs-delta", Values: []string(nil)},
{Name: "multi_ack", Values: []string(nil)},
},
@@ -232,7 +221,7 @@ func (s *SuiteDecoder) TestCaps(c *C) {
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack\n",
pktline.FlushString,
},
- capabilities: []packp.Capability{
+ capabilities: []Capability{
{Name: "ofs-delta", Values: []string(nil)},
{Name: "multi_ack", Values: []string(nil)},
},
@@ -242,7 +231,7 @@ func (s *SuiteDecoder) TestCaps(c *C) {
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar\n",
pktline.FlushString,
},
- capabilities: []packp.Capability{
+ capabilities: []Capability{
{Name: "symref", Values: []string{"HEAD:refs/heads/master"}},
{Name: "agent", Values: []string{"foo=bar"}},
},
@@ -252,13 +241,13 @@ func (s *SuiteDecoder) TestCaps(c *C) {
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar agent=new-agent\n",
pktline.FlushString,
},
- capabilities: []packp.Capability{
+ capabilities: []Capability{
{Name: "symref", Values: []string{"HEAD:refs/heads/master"}},
{Name: "agent", Values: []string{"foo=bar", "new-agent"}},
},
},
} {
- ar := testDecodeOK(c, test.input)
+ ar := s.testDecodeOK(c, test.input)
for _, fixCap := range test.capabilities {
c.Assert(ar.Capabilities.Supports(fixCap.Name), Equals, true,
Commentf("input = %q, capability = %q", test.input, fixCap.Name))
@@ -268,31 +257,31 @@ func (s *SuiteDecoder) TestCaps(c *C) {
}
}
-func (s *SuiteDecoder) TestWithPrefix(c *C) {
+func (s *AdvRefsDecodeSuite) TestWithPrefix(c *C) {
payloads := []string{
"# this is a prefix\n",
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00foo\n",
pktline.FlushString,
}
- ar := testDecodeOK(c, payloads)
+ ar := s.testDecodeOK(c, payloads)
c.Assert(len(ar.Prefix), Equals, 1)
c.Assert(ar.Prefix[0], DeepEquals, []byte("# this is a prefix"))
}
-func (s *SuiteDecoder) TestWithPrefixAndFlush(c *C) {
+func (s *AdvRefsDecodeSuite) TestWithPrefixAndFlush(c *C) {
payloads := []string{
"# this is a prefix\n",
pktline.FlushString,
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00foo\n",
pktline.FlushString,
}
- ar := testDecodeOK(c, payloads)
+ ar := s.testDecodeOK(c, payloads)
c.Assert(len(ar.Prefix), Equals, 2)
c.Assert(ar.Prefix[0], DeepEquals, []byte("# this is a prefix"))
c.Assert(ar.Prefix[1], DeepEquals, []byte(pktline.FlushString))
}
-func (s *SuiteDecoder) TestOtherRefs(c *C) {
+func (s *AdvRefsDecodeSuite) TestOtherRefs(c *C) {
for _, test := range [...]struct {
input []string
references map[string]plumbing.Hash
@@ -389,34 +378,34 @@ func (s *SuiteDecoder) TestOtherRefs(c *C) {
},
},
} {
- ar := testDecodeOK(c, test.input)
+ ar := s.testDecodeOK(c, test.input)
comment := Commentf("input = %v\n", test.input)
c.Assert(ar.References, DeepEquals, test.references, comment)
c.Assert(ar.Peeled, DeepEquals, test.peeled, comment)
}
}
-func (s *SuiteDecoder) TestMalformedOtherRefsNoSpace(c *C) {
+func (s *AdvRefsDecodeSuite) TestMalformedOtherRefsNoSpace(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8crefs/tags/v2.6.11\n",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed ref data.*")
+ s.testDecoderErrorMatches(c, r, ".*malformed ref data.*")
}
-func (s *SuiteDecoder) TestMalformedOtherRefsMultipleSpaces(c *C) {
+func (s *AdvRefsDecodeSuite) TestMalformedOtherRefsMultipleSpaces(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack thin-pack\n",
"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags v2.6.11\n",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed ref data.*")
+ s.testDecoderErrorMatches(c, r, ".*malformed ref data.*")
}
-func (s *SuiteDecoder) TestShallow(c *C) {
+func (s *AdvRefsDecodeSuite) TestShallow(c *C) {
for _, test := range [...]struct {
input []string
shallows []plumbing.Hash
@@ -456,13 +445,13 @@ func (s *SuiteDecoder) TestShallow(c *C) {
},
},
} {
- ar := testDecodeOK(c, test.input)
+ ar := s.testDecodeOK(c, test.input)
comment := Commentf("input = %v\n", test.input)
c.Assert(ar.Shallows, DeepEquals, test.shallows, comment)
}
}
-func (s *SuiteDecoder) TestInvalidShallowHash(c *C) {
+func (s *AdvRefsDecodeSuite) TestInvalidShallowHash(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
@@ -473,10 +462,10 @@ func (s *SuiteDecoder) TestInvalidShallowHash(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid hash text.*")
+ s.testDecoderErrorMatches(c, r, ".*invalid hash text.*")
}
-func (s *SuiteDecoder) TestGarbageAfterShallow(c *C) {
+func (s *AdvRefsDecodeSuite) TestGarbageAfterShallow(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
@@ -488,10 +477,10 @@ func (s *SuiteDecoder) TestGarbageAfterShallow(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed shallow prefix.*")
+ s.testDecoderErrorMatches(c, r, ".*malformed shallow prefix.*")
}
-func (s *SuiteDecoder) TestMalformedShallowHash(c *C) {
+func (s *AdvRefsDecodeSuite) TestMalformedShallowHash(c *C) {
payloads := []string{
"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
@@ -502,19 +491,19 @@ func (s *SuiteDecoder) TestMalformedShallowHash(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed shallow hash.*")
+ s.testDecoderErrorMatches(c, r, ".*malformed shallow hash.*")
}
-func (s *SuiteDecoder) TestEOFRefs(c *C) {
+func (s *AdvRefsDecodeSuite) TestEOFRefs(c *C) {
input := strings.NewReader("" +
"005b6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n" +
"003fa6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n" +
"00355dc01c595e6c6ec9ccda4f6ffbf614e4d92bb0c7 refs/foo\n",
)
- testDecoderErrorMatches(c, input, ".*invalid pkt-len.*")
+ s.testDecoderErrorMatches(c, input, ".*invalid pkt-len.*")
}
-func (s *SuiteDecoder) TestEOFShallows(c *C) {
+func (s *AdvRefsDecodeSuite) TestEOFShallows(c *C) {
input := strings.NewReader("" +
"005b6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n" +
"003fa6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n" +
@@ -522,5 +511,5 @@ func (s *SuiteDecoder) TestEOFShallows(c *C) {
"0047c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n" +
"0035shallow 1111111111111111111111111111111111111111\n" +
"0034shallow 222222222222222222222222")
- testDecoderErrorMatches(c, input, ".*unexpected EOF.*")
+ s.testDecoderErrorMatches(c, input, ".*unexpected EOF.*")
}
diff --git a/plumbing/format/packp/advrefs/encoder.go b/plumbing/protocol/packp/advrefs_encoder.go
index 8c52f14..cdba188 100644
--- a/plumbing/format/packp/advrefs/encoder.go
+++ b/plumbing/protocol/packp/advrefs_encoder.go
@@ -1,4 +1,4 @@
-package advrefs
+package packp
import (
"bytes"
@@ -6,20 +6,19 @@ import (
"sort"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
)
-// An Encoder writes AdvRefs values to an output stream.
-type Encoder struct {
+// An AdvRefsEncoder writes AdvRefs values to an output stream.
+type AdvRefsEncoder struct {
data *AdvRefs // data to encode
pe *pktline.Encoder // where to write the encoded data
err error // sticky error
}
-// NewEncoder returns a new encoder that writes to w.
-func NewEncoder(w io.Writer) *Encoder {
- return &Encoder{
+// NewAdvRefsEncoder returns a new encoder that writes to w.
+func NewAdvRefsEncoder(w io.Writer) *AdvRefsEncoder {
+ return &AdvRefsEncoder{
pe: pktline.NewEncoder(w),
}
}
@@ -29,7 +28,7 @@ func NewEncoder(w io.Writer) *Encoder {
// All the payloads will end with a newline character. Capabilities,
// references and shallows are written in alphabetical order, except for
// peeled references that always follow their corresponding references.
-func (e *Encoder) Encode(v *AdvRefs) error {
+func (e *AdvRefsEncoder) Encode(v *AdvRefs) error {
e.data = v
for state := encodePrefix; state != nil; {
@@ -39,9 +38,9 @@ func (e *Encoder) Encode(v *AdvRefs) error {
return e.err
}
-type encoderStateFn func(*Encoder) encoderStateFn
+type encoderStateFn func(*AdvRefsEncoder) encoderStateFn
-func encodePrefix(e *Encoder) encoderStateFn {
+func encodePrefix(e *AdvRefsEncoder) encoderStateFn {
for _, p := range e.data.Prefix {
if bytes.Equal(p, pktline.Flush) {
if e.err = e.pe.Flush(); e.err != nil {
@@ -59,7 +58,7 @@ func encodePrefix(e *Encoder) encoderStateFn {
// Adds the first pkt-line payload: head hash, head ref and capabilities.
// Also handle the special case when no HEAD ref is found.
-func encodeFirstLine(e *Encoder) encoderStateFn {
+func encodeFirstLine(e *AdvRefsEncoder) encoderStateFn {
head := formatHead(e.data.Head)
separator := formatSeparator(e.data.Head)
capabilities := formatCaps(e.data.Capabilities)
@@ -87,7 +86,7 @@ func formatSeparator(h *plumbing.Hash) string {
return head
}
-func formatCaps(c *packp.Capabilities) string {
+func formatCaps(c *Capabilities) string {
if c == nil {
return ""
}
@@ -99,7 +98,7 @@ func formatCaps(c *packp.Capabilities) string {
// Adds the (sorted) refs: hash SP refname EOL
// and their peeled refs if any.
-func encodeRefs(e *Encoder) encoderStateFn {
+func encodeRefs(e *AdvRefsEncoder) encoderStateFn {
refs := sortRefs(e.data.References)
for _, r := range refs {
hash, _ := e.data.References[r]
@@ -128,7 +127,7 @@ func sortRefs(m map[string]plumbing.Hash) []string {
}
// Adds the (sorted) shallows: "shallow" SP hash EOL
-func encodeShallow(e *Encoder) encoderStateFn {
+func encodeShallow(e *AdvRefsEncoder) encoderStateFn {
sorted := sortShallows(e.data.Shallows)
for _, hash := range sorted {
if e.err = e.pe.Encodef("shallow %s\n", hash); e.err != nil {
@@ -149,7 +148,7 @@ func sortShallows(c []plumbing.Hash) []string {
return ret
}
-func encodeFlush(e *Encoder) encoderStateFn {
+func encodeFlush(e *AdvRefsEncoder) encoderStateFn {
e.err = e.pe.Flush()
return nil
}
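
The encoder gets the same treatment: NewEncoder becomes NewAdvRefsEncoder and formatCaps now takes the package-local *Capabilities. A round-trip sketch (not from this commit) writing an AdvRefs back out as pkt-lines:

    package main

    import (
        "bytes"
        "fmt"

        "gopkg.in/src-d/go-git.v4/plumbing"
        "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
    )

    func main() {
        ar := packp.NewAdvRefs()
        ar.References["refs/heads/master"] =
            plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7")

        var buf bytes.Buffer
        if err := packp.NewAdvRefsEncoder(&buf).Encode(ar); err != nil {
            fmt.Println("encode error:", err)
            return
        }
        // With no HEAD set, the first payload uses the zero-id and the
        // "capabilities^{}" placeholder, followed by the refs and a flush-pkt.
        fmt.Printf("%q\n", buf.String())
    }
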
diff --git a/plumbing/format/packp/advrefs/encoder_test.go b/plumbing/protocol/packp/advrefs_encoder_test.go
index b4b085c..222a267 100644
--- a/plumbing/format/packp/advrefs/encoder_test.go
+++ b/plumbing/protocol/packp/advrefs_encoder_test.go
@@ -1,34 +1,22 @@
-package advrefs_test
+package packp
import (
"bytes"
"strings"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
)
-type SuiteEncoder struct{}
+type AdvRefsEncodeSuite struct{}
-var _ = Suite(&SuiteEncoder{})
+var _ = Suite(&AdvRefsEncodeSuite{})
-// returns a byte slice with the pkt-lines for the given payloads.
-func pktlines(c *C, payloads ...[]byte) []byte {
+func testEncode(c *C, input *AdvRefs, expected []byte) {
var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.Encode(payloads...)
- c.Assert(err, IsNil, Commentf("building pktlines for %v\n", payloads))
-
- return buf.Bytes()
-}
-
-func testEncode(c *C, input *advrefs.AdvRefs, expected []byte) {
- var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
+ e := NewAdvRefsEncoder(&buf)
err := e.Encode(input)
c.Assert(err, IsNil)
obtained := buf.Bytes()
@@ -38,68 +26,68 @@ func testEncode(c *C, input *advrefs.AdvRefs, expected []byte) {
c.Assert(obtained, DeepEquals, expected, comment)
}
-func (s *SuiteEncoder) TestZeroValue(c *C) {
- ar := &advrefs.AdvRefs{}
+func (s *AdvRefsEncodeSuite) TestZeroValue(c *C) {
+ ar := &AdvRefs{}
expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- pktline.Flush,
+ "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
+ pktline.FlushString,
)
testEncode(c, ar, expected)
}
-func (s *SuiteEncoder) TestHead(c *C) {
+func (s *AdvRefsEncodeSuite) TestHead(c *C) {
hash := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- ar := &advrefs.AdvRefs{
+ ar := &AdvRefs{
Head: &hash,
}
expected := pktlines(c,
- []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n"),
- pktline.Flush,
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n",
+ pktline.FlushString,
)
testEncode(c, ar, expected)
}
-func (s *SuiteEncoder) TestCapsNoHead(c *C) {
- capabilities := packp.NewCapabilities()
+func (s *AdvRefsEncodeSuite) TestCapsNoHead(c *C) {
+ capabilities := NewCapabilities()
capabilities.Add("symref", "HEAD:/refs/heads/master")
capabilities.Add("ofs-delta")
capabilities.Add("multi_ack")
- ar := &advrefs.AdvRefs{
+ ar := &AdvRefs{
Capabilities: capabilities,
}
expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
- pktline.Flush,
+ "0000000000000000000000000000000000000000 capabilities^{}\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
+ pktline.FlushString,
)
testEncode(c, ar, expected)
}
-func (s *SuiteEncoder) TestCapsWithHead(c *C) {
+func (s *AdvRefsEncodeSuite) TestCapsWithHead(c *C) {
hash := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- capabilities := packp.NewCapabilities()
+ capabilities := NewCapabilities()
capabilities.Add("symref", "HEAD:/refs/heads/master")
capabilities.Add("ofs-delta")
capabilities.Add("multi_ack")
- ar := &advrefs.AdvRefs{
+ ar := &AdvRefs{
Head: &hash,
Capabilities: capabilities,
}
expected := pktlines(c,
- []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
- pktline.Flush,
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
+ pktline.FlushString,
)
testEncode(c, ar, expected)
}
-func (s *SuiteEncoder) TestRefs(c *C) {
+func (s *AdvRefsEncodeSuite) TestRefs(c *C) {
references := map[string]plumbing.Hash{
"refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
"refs/tags/v2.6.12-tree": plumbing.NewHash("1111111111111111111111111111111111111111"),
@@ -107,24 +95,24 @@ func (s *SuiteEncoder) TestRefs(c *C) {
"refs/tags/v2.6.13-tree": plumbing.NewHash("2222222222222222222222222222222222222222"),
"refs/tags/v2.6.11-tree": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
}
- ar := &advrefs.AdvRefs{
+ ar := &AdvRefs{
References: references,
}
expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
- []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
- []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
- []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
- []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
- pktline.Flush,
+ "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
+ "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
+ "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
+ "1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n",
+ "2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n",
+ "3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n",
+ pktline.FlushString,
)
testEncode(c, ar, expected)
}
-func (s *SuiteEncoder) TestPeeled(c *C) {
+func (s *AdvRefsEncodeSuite) TestPeeled(c *C) {
references := map[string]plumbing.Hash{
"refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
"refs/tags/v2.6.12-tree": plumbing.NewHash("1111111111111111111111111111111111111111"),
@@ -136,53 +124,53 @@ func (s *SuiteEncoder) TestPeeled(c *C) {
"refs/tags/v2.7.13-tree": plumbing.NewHash("4444444444444444444444444444444444444444"),
"refs/tags/v2.6.12-tree": plumbing.NewHash("5555555555555555555555555555555555555555"),
}
- ar := &advrefs.AdvRefs{
+ ar := &AdvRefs{
References: references,
Peeled: peeled,
}
expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
- []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
- []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
- []byte("5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n"),
- []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
- []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
- []byte("4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n"),
- pktline.Flush,
+ "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
+ "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
+ "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
+ "1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n",
+ "5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n",
+ "2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n",
+ "3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n",
+ "4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n",
+ pktline.FlushString,
)
testEncode(c, ar, expected)
}
-func (s *SuiteEncoder) TestShallow(c *C) {
+func (s *AdvRefsEncodeSuite) TestShallow(c *C) {
shallows := []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111"),
plumbing.NewHash("4444444444444444444444444444444444444444"),
plumbing.NewHash("3333333333333333333333333333333333333333"),
plumbing.NewHash("2222222222222222222222222222222222222222"),
}
- ar := &advrefs.AdvRefs{
+ ar := &AdvRefs{
Shallows: shallows,
}
expected := pktlines(c,
- []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
- []byte("shallow 1111111111111111111111111111111111111111\n"),
- []byte("shallow 2222222222222222222222222222222222222222\n"),
- []byte("shallow 3333333333333333333333333333333333333333\n"),
- []byte("shallow 4444444444444444444444444444444444444444\n"),
- pktline.Flush,
+ "0000000000000000000000000000000000000000 capabilities^{}\x00\n",
+ "shallow 1111111111111111111111111111111111111111\n",
+ "shallow 2222222222222222222222222222222222222222\n",
+ "shallow 3333333333333333333333333333333333333333\n",
+ "shallow 4444444444444444444444444444444444444444\n",
+ pktline.FlushString,
)
testEncode(c, ar, expected)
}
-func (s *SuiteEncoder) TestAll(c *C) {
+func (s *AdvRefsEncodeSuite) TestAll(c *C) {
hash := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- capabilities := packp.NewCapabilities()
+ capabilities := NewCapabilities()
capabilities.Add("symref", "HEAD:/refs/heads/master")
capabilities.Add("ofs-delta")
capabilities.Add("multi_ack")
@@ -207,7 +195,7 @@ func (s *SuiteEncoder) TestAll(c *C) {
plumbing.NewHash("2222222222222222222222222222222222222222"),
}
- ar := &advrefs.AdvRefs{
+ ar := &AdvRefs{
Head: &hash,
Capabilities: capabilities,
References: references,
@@ -216,34 +204,34 @@ func (s *SuiteEncoder) TestAll(c *C) {
}
expected := pktlines(c,
- []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
- []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
- []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
- []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
- []byte("5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n"),
- []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
- []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
- []byte("4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n"),
- []byte("shallow 1111111111111111111111111111111111111111\n"),
- []byte("shallow 2222222222222222222222222222222222222222\n"),
- []byte("shallow 3333333333333333333333333333333333333333\n"),
- []byte("shallow 4444444444444444444444444444444444444444\n"),
- pktline.Flush,
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
+ "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
+ "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
+ "1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n",
+ "5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n",
+ "2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n",
+ "3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n",
+ "4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n",
+ "shallow 1111111111111111111111111111111111111111\n",
+ "shallow 2222222222222222222222222222222222222222\n",
+ "shallow 3333333333333333333333333333333333333333\n",
+ "shallow 4444444444444444444444444444444444444444\n",
+ pktline.FlushString,
)
testEncode(c, ar, expected)
}
-func (s *SuiteEncoder) TestErrorTooLong(c *C) {
+func (s *AdvRefsEncodeSuite) TestErrorTooLong(c *C) {
references := map[string]plumbing.Hash{
strings.Repeat("a", pktline.MaxPayloadSize): plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
}
- ar := &advrefs.AdvRefs{
+ ar := &AdvRefs{
References: references,
}
var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
+ e := NewAdvRefsEncoder(&buf)
err := e.Encode(ar)
c.Assert(err, ErrorMatches, ".*payload is too long.*")
}
diff --git a/plumbing/format/packp/advrefs/advrefs_test.go b/plumbing/protocol/packp/advrefs_test.go
index 2639b6e..1a696d4 100644
--- a/plumbing/format/packp/advrefs/advrefs_test.go
+++ b/plumbing/protocol/packp/advrefs_test.go
@@ -1,4 +1,4 @@
-package advrefs_test
+package packp_test
import (
"bytes"
@@ -8,8 +8,8 @@ import (
"testing"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
. "gopkg.in/check.v1"
)
@@ -43,13 +43,13 @@ func (s *SuiteDecodeEncode) test(c *C, in []string, exp []string) {
var obtained []byte
{
- ar := advrefs.New()
- d := advrefs.NewDecoder(input)
+ ar := packp.NewAdvRefs()
+ d := packp.NewAdvRefsDecoder(input)
err = d.Decode(ar)
c.Assert(err, IsNil)
var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
+ e := packp.NewAdvRefsEncoder(&buf)
err := e.Encode(ar)
c.Assert(err, IsNil)
@@ -258,11 +258,11 @@ func ExampleDecoder_Decode() {
// Use the raw message as our input.
input := strings.NewReader(raw)
- // Create a advref.Decoder reading from our input.
- d := advrefs.NewDecoder(input)
+ // Create a Decoder reading from our input.
+ d := packp.NewAdvRefsDecoder(input)
// Decode the input into a newly allocated AdvRefs value.
- ar := advrefs.New()
+ ar := packp.NewAdvRefs()
_ = d.Decode(ar) // error check ignored for brevity
// Do something interesting with the AdvRefs, e.g. print its contents.
@@ -278,7 +278,7 @@ func ExampleDecoder_Decode() {
func ExampleEncoder_Encode() {
// Create an AdvRefs with the contents you want...
- ar := advrefs.New()
+ ar := packp.NewAdvRefs()
// ...add a hash for the HEAD...
head := plumbing.NewHash("1111111111111111111111111111111111111111")
@@ -299,11 +299,11 @@ func ExampleEncoder_Encode() {
// ...and finally add a shallow
ar.Shallows = append(ar.Shallows, plumbing.NewHash("5555555555555555555555555555555555555555"))
- // Encode the advrefs.Contents to a bytes.Buffer.
+	// Encode the packp contents to a bytes.Buffer.
// You can encode into stdout too, but you will not be able
// see the '\x00' after "HEAD".
var buf bytes.Buffer
- e := advrefs.NewEncoder(&buf)
+ e := packp.NewAdvRefsEncoder(&buf)
_ = e.Encode(ar) // error checks ignored for brevity
// Print the contents of the buffer as a quoted string.
diff --git a/plumbing/format/packp/capabilities.go b/plumbing/protocol/packp/capabilities.go
index d77c2fa..d77c2fa 100644
--- a/plumbing/format/packp/capabilities.go
+++ b/plumbing/protocol/packp/capabilities.go
diff --git a/plumbing/format/packp/capabilities_test.go b/plumbing/protocol/packp/capabilities_test.go
index e42a0c7..a909e4c 100644
--- a/plumbing/format/packp/capabilities_test.go
+++ b/plumbing/protocol/packp/capabilities_test.go
@@ -1,13 +1,9 @@
package packp
import (
- "testing"
-
. "gopkg.in/check.v1"
)
-func Test(t *testing.T) { TestingT(t) }
-
type SuiteCapabilities struct{}
var _ = Suite(&SuiteCapabilities{})
diff --git a/plumbing/protocol/packp/common.go b/plumbing/protocol/packp/common.go
new file mode 100644
index 0000000..c4b44f7
--- /dev/null
+++ b/plumbing/protocol/packp/common.go
@@ -0,0 +1,45 @@
+package packp
+
+import "bytes"
+
+type stateFn func() stateFn
+
+const (
+ // common
+ hashSize = 40
+
+ // advrefs
+ head = "HEAD"
+ noHead = "capabilities^{}"
+)
+
+var (
+ // common
+ sp = []byte(" ")
+ eol = []byte("\n")
+
+ // advrefs
+ null = []byte("\x00")
+ peeled = []byte("^{}")
+ noHeadMark = []byte(" capabilities^{}\x00")
+
+ // ulreq
+ want = []byte("want ")
+ shallow = []byte("shallow ")
+ deepen = []byte("deepen")
+ deepenCommits = []byte("deepen ")
+ deepenSince = []byte("deepen-since ")
+ deepenReference = []byte("deepen-not ")
+)
+
+// Capabilities are a single string or a name=value.
+// Even though we are only going to read at most 1 value, we return
+// a slice of values, as Capability.Add receives that.
+func readCapability(data []byte) (name string, values []string) {
+ pair := bytes.SplitN(data, []byte{'='}, 2)
+ if len(pair) == 2 {
+ values = append(values, string(pair[1]))
+ }
+
+ return string(pair[0]), values
+}
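
readCapability (moved here unchanged from the advrefs and ulreq decoders) splits a capability token on the first '='. Since it is unexported, the sketch below would have to live inside package packp (for example in a _test.go file); it is illustrative only, not part of the commit:

    package packp

    import "fmt"

    // demoReadCapability is a hypothetical helper showing the two token
    // shapes: a bare name, or name=value.
    func demoReadCapability() {
        name, values := readCapability([]byte("symref=HEAD:refs/heads/master"))
        fmt.Println(name, values) // symref [HEAD:refs/heads/master]

        name, values = readCapability([]byte("ofs-delta"))
        fmt.Println(name, values) // ofs-delta []
    }
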
diff --git a/plumbing/protocol/packp/common_test.go b/plumbing/protocol/packp/common_test.go
new file mode 100644
index 0000000..c055fee
--- /dev/null
+++ b/plumbing/protocol/packp/common_test.go
@@ -0,0 +1,33 @@
+package packp
+
+import (
+ "bytes"
+ "io"
+ "testing"
+
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+
+ . "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) { TestingT(t) }
+
+// returns a byte slice with the pkt-lines for the given payloads.
+func pktlines(c *C, payloads ...string) []byte {
+ var buf bytes.Buffer
+ e := pktline.NewEncoder(&buf)
+
+ err := e.EncodeString(payloads...)
+ c.Assert(err, IsNil, Commentf("building pktlines for %v\n", payloads))
+
+ return buf.Bytes()
+}
+
+func toPktLines(c *C, payloads []string) io.Reader {
+ var buf bytes.Buffer
+ e := pktline.NewEncoder(&buf)
+ err := e.EncodeString(payloads...)
+ c.Assert(err, IsNil)
+
+ return &buf
+}
diff --git a/plumbing/format/packp/doc.go b/plumbing/protocol/packp/doc.go
index 4950d1d..4950d1d 100644
--- a/plumbing/format/packp/doc.go
+++ b/plumbing/protocol/packp/doc.go
diff --git a/plumbing/format/packp/ulreq/ulreq.go b/plumbing/protocol/packp/ulreq.go
index d2cc7c0..5870001 100644
--- a/plumbing/format/packp/ulreq/ulreq.go
+++ b/plumbing/protocol/packp/ulreq.go
@@ -1,19 +1,16 @@
-// Package ulreq implements encoding and decoding upload-request
-// messages from a git-upload-pack command.
-package ulreq
+package packp
import (
"time"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
)
// UlReq values represent the information transmitted on a
// upload-request message. Values from this type are not zero-value
// safe, use the New function instead.
type UlReq struct {
- Capabilities *packp.Capabilities
+ Capabilities *Capabilities
Wants []plumbing.Hash
Shallows []plumbing.Hash
Depth Depth
@@ -42,13 +39,13 @@ type DepthReference string
func (d DepthReference) isDepth() {}
-// New returns a pointer to a new UlReq value, ready to be used. It has
+// NewUlReq returns a pointer to a new UlReq value, ready to be used. It has
// no capabilities, wants or shallows and an infinite depth. Please
// note that to encode an upload-request it has to have at least one
// wanted hash.
-func New() *UlReq {
+func NewUlReq() *UlReq {
return &UlReq{
- Capabilities: packp.NewCapabilities(),
+ Capabilities: NewCapabilities(),
Wants: []plumbing.Hash{},
Shallows: []plumbing.Hash{},
Depth: DepthCommits(0),
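
ulreq.New becomes packp.NewUlReq and the Capabilities field drops its package qualifier; the Depth variants (DepthCommits, DepthSince, DepthReference) are untouched. A short sketch of building an upload-request under the new names (not from this commit):

    package main

    import (
        "fmt"
        "time"

        "gopkg.in/src-d/go-git.v4/plumbing"
        "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
    )

    func main() {
        ur := packp.NewUlReq() // formerly ulreq.New()
        ur.Wants = append(ur.Wants,
            plumbing.NewHash("1111111111111111111111111111111111111111"))
        ur.Capabilities.Add("ofs-delta")

        // Depth is one of three variants: commit count, timestamp or ref name.
        ur.Depth = packp.DepthCommits(4)
        _ = packp.DepthSince(time.Now().UTC())
        _ = packp.DepthReference("refs/heads/master")

        fmt.Println(len(ur.Wants), ur.Depth) // 1 4
    }
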
diff --git a/plumbing/format/packp/ulreq/decoder.go b/plumbing/protocol/packp/ulreq_decoder.go
index 9083e04..67ba479 100644
--- a/plumbing/format/packp/ulreq/decoder.go
+++ b/plumbing/protocol/packp/ulreq_decoder.go
@@ -1,4 +1,4 @@
-package ulreq
+package packp
import (
"bytes"
@@ -9,26 +9,11 @@ import (
"time"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
)
-const (
- hashSize = 40
-)
-
-var (
- eol = []byte("\n")
- sp = []byte(" ")
- want = []byte("want ")
- shallow = []byte("shallow ")
- deepen = []byte("deepen")
- deepenCommits = []byte("deepen ")
- deepenSince = []byte("deepen-since ")
- deepenReference = []byte("deepen-not ")
-)
-
-// A Decoder reads and decodes AdvRef values from an input stream.
-type Decoder struct {
+// A UlReqDecoder reads and decodes UlReq values from an input stream.
+type UlReqDecoder struct {
s *pktline.Scanner // a pkt-line scanner from the input stream
line []byte // current pkt-line contents, use parser.nextLine() to make it advance
nLine int // current pkt-line number for debugging, begins at 1
@@ -36,31 +21,29 @@ type Decoder struct {
data *UlReq // parsed data is stored here
}
-// NewDecoder returns a new decoder that reads from r.
+// NewUlReqDecoder returns a new decoder that reads from r.
//
// Will not read more data from r than necessary.
-func NewDecoder(r io.Reader) *Decoder {
- return &Decoder{
+func NewUlReqDecoder(r io.Reader) *UlReqDecoder {
+ return &UlReqDecoder{
s: pktline.NewScanner(r),
}
}
// Decode reads the next upload-request from its input and
// stores it in the value pointed to by v.
-func (d *Decoder) Decode(v *UlReq) error {
+func (d *UlReqDecoder) Decode(v *UlReq) error {
d.data = v
- for state := decodeFirstWant; state != nil; {
- state = state(d)
+ for state := d.decodeFirstWant; state != nil; {
+ state = state()
}
return d.err
}
-type decoderStateFn func(*Decoder) decoderStateFn
-
// fills out the parser sticky error
-func (d *Decoder) error(format string, a ...interface{}) {
+func (d *UlReqDecoder) error(format string, a ...interface{}) {
d.err = fmt.Errorf("pkt-line %d: %s", d.nLine,
fmt.Sprintf(format, a...))
}
@@ -69,7 +52,7 @@ func (d *Decoder) error(format string, a ...interface{}) {
// p.line and increments p.nLine. A successful invocation returns true,
// otherwise, false is returned and the sticky error is filled out
// accordingly. Trims eols at the end of the payloads.
-func (d *Decoder) nextLine() bool {
+func (d *UlReqDecoder) nextLine() bool {
d.nLine++
if !d.s.Scan() {
@@ -88,7 +71,7 @@ func (d *Decoder) nextLine() bool {
}
// Expected format: want <hash>[ capabilities]
-func decodeFirstWant(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeFirstWant() stateFn {
if ok := d.nextLine(); !ok {
return nil
}
@@ -105,10 +88,10 @@ func decodeFirstWant(d *Decoder) decoderStateFn {
}
d.data.Wants = append(d.data.Wants, hash)
- return decodeCaps
+ return d.decodeCaps
}
-func (d *Decoder) readHash() (plumbing.Hash, bool) {
+func (d *UlReqDecoder) readHash() (plumbing.Hash, bool) {
if len(d.line) < hashSize {
d.err = fmt.Errorf("malformed hash: %v", d.line)
return plumbing.ZeroHash, false
@@ -125,9 +108,9 @@ func (d *Decoder) readHash() (plumbing.Hash, bool) {
}
// Expected format: sp cap1 sp cap2 sp cap3...
-func decodeCaps(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeCaps() stateFn {
if len(d.line) == 0 {
- return decodeOtherWants
+ return d.decodeOtherWants
}
d.line = bytes.TrimPrefix(d.line, sp)
@@ -137,33 +120,21 @@ func decodeCaps(d *Decoder) decoderStateFn {
d.data.Capabilities.Add(name, values...)
}
- return decodeOtherWants
-}
-
-// Capabilities are a single string or a name=value.
-// Even though we are only going to read at moust 1 value, we return
-// a slice of values, as Capability.Add receives that.
-func readCapability(data []byte) (name string, values []string) {
- pair := bytes.SplitN(data, []byte{'='}, 2)
- if len(pair) == 2 {
- values = append(values, string(pair[1]))
- }
-
- return string(pair[0]), values
+ return d.decodeOtherWants
}
// Expected format: want <hash>
-func decodeOtherWants(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeOtherWants() stateFn {
if ok := d.nextLine(); !ok {
return nil
}
if bytes.HasPrefix(d.line, shallow) {
- return decodeShallow
+ return d.decodeShallow
}
if bytes.HasPrefix(d.line, deepen) {
- return decodeDeepen
+ return d.decodeDeepen
}
if len(d.line) == 0 {
@@ -182,13 +153,13 @@ func decodeOtherWants(d *Decoder) decoderStateFn {
}
d.data.Wants = append(d.data.Wants, hash)
- return decodeOtherWants
+ return d.decodeOtherWants
}
// Expected format: shallow <hash>
-func decodeShallow(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeShallow() stateFn {
if bytes.HasPrefix(d.line, deepen) {
- return decodeDeepen
+ return d.decodeDeepen
}
if len(d.line) == 0 {
@@ -211,21 +182,21 @@ func decodeShallow(d *Decoder) decoderStateFn {
return nil
}
- return decodeShallow
+ return d.decodeShallow
}
// Expected format: deepen <n> / deepen-since <ul> / deepen-not <ref>
-func decodeDeepen(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeDeepen() stateFn {
if bytes.HasPrefix(d.line, deepenCommits) {
- return decodeDeepenCommits
+ return d.decodeDeepenCommits
}
if bytes.HasPrefix(d.line, deepenSince) {
- return decodeDeepenSince
+ return d.decodeDeepenSince
}
if bytes.HasPrefix(d.line, deepenReference) {
- return decodeDeepenReference
+ return d.decodeDeepenReference
}
if len(d.line) == 0 {
@@ -236,7 +207,7 @@ func decodeDeepen(d *Decoder) decoderStateFn {
return nil
}
-func decodeDeepenCommits(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeDeepenCommits() stateFn {
d.line = bytes.TrimPrefix(d.line, deepenCommits)
var n int
@@ -249,10 +220,10 @@ func decodeDeepenCommits(d *Decoder) decoderStateFn {
}
d.data.Depth = DepthCommits(n)
- return decodeFlush
+ return d.decodeFlush
}
-func decodeDeepenSince(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeDeepenSince() stateFn {
d.line = bytes.TrimPrefix(d.line, deepenSince)
var secs int64
@@ -263,18 +234,18 @@ func decodeDeepenSince(d *Decoder) decoderStateFn {
t := time.Unix(secs, 0).UTC()
d.data.Depth = DepthSince(t)
- return decodeFlush
+ return d.decodeFlush
}
-func decodeDeepenReference(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeDeepenReference() stateFn {
d.line = bytes.TrimPrefix(d.line, deepenReference)
d.data.Depth = DepthReference(string(d.line))
- return decodeFlush
+ return d.decodeFlush
}
-func decodeFlush(d *Decoder) decoderStateFn {
+func (d *UlReqDecoder) decodeFlush() stateFn {
if ok := d.nextLine(); !ok {
return nil
}
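
Besides the rename, this decoder switches from free functions of type decoderStateFn to methods returning the shared stateFn type from common.go, so the state machine now closes over the receiver. A usage sketch (not from this commit) built from the same payloads the tests use:

    package main

    import (
        "bytes"
        "fmt"

        "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
        "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
    )

    func main() {
        // Build the request as pkt-lines: one want with capabilities, one
        // shallow, a deepen limit and the closing flush-pkt.
        var buf bytes.Buffer
        e := pktline.NewEncoder(&buf)
        if err := e.EncodeString(
            "want 1111111111111111111111111111111111111111 ofs-delta multi_ack",
            "shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            "deepen 4\n",
            pktline.FlushString,
        ); err != nil {
            fmt.Println("encode error:", err)
            return
        }

        ur := packp.NewUlReq()
        if err := packp.NewUlReqDecoder(&buf).Decode(ur); err != nil {
            fmt.Println("decode error:", err)
            return
        }
        fmt.Println(len(ur.Wants), len(ur.Shallows), ur.Depth) // 1 1 4
    }
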
diff --git a/plumbing/format/packp/ulreq/decoder_test.go b/plumbing/protocol/packp/ulreq_decoder_test.go
index 01e4f90..e90962a 100644
--- a/plumbing/format/packp/ulreq/decoder_test.go
+++ b/plumbing/protocol/packp/ulreq_decoder_test.go
@@ -1,4 +1,4 @@
-package ulreq
+package packp
import (
"bytes"
@@ -7,79 +7,70 @@ import (
"time"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
)
-type SuiteDecoder struct{}
+type UlReqDecodeSuite struct{}
-var _ = Suite(&SuiteDecoder{})
+var _ = Suite(&UlReqDecodeSuite{})
-func (s *SuiteDecoder) TestEmpty(c *C) {
- ur := New()
+func (s *UlReqDecodeSuite) TestEmpty(c *C) {
+ ur := NewUlReq()
var buf bytes.Buffer
- d := NewDecoder(&buf)
+ d := NewUlReqDecoder(&buf)
err := d.Decode(ur)
c.Assert(err, ErrorMatches, "pkt-line 1: EOF")
}
-func (s *SuiteDecoder) TestNoWant(c *C) {
+func (s *UlReqDecodeSuite) TestNoWant(c *C) {
payloads := []string{
"foobar",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*missing 'want '.*")
+ s.testDecoderErrorMatches(c, r, ".*missing 'want '.*")
}
-func toPktLines(c *C, payloads []string) io.Reader {
- var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil)
-
- return &buf
-}
-
-func testDecoderErrorMatches(c *C, input io.Reader, pattern string) {
- ur := New()
- d := NewDecoder(input)
+func (s *UlReqDecodeSuite) testDecoderErrorMatches(c *C, input io.Reader, pattern string) {
+ ur := NewUlReq()
+ d := NewUlReqDecoder(input)
err := d.Decode(ur)
c.Assert(err, ErrorMatches, pattern)
}
-func (s *SuiteDecoder) TestInvalidFirstHash(c *C) {
+func (s *UlReqDecodeSuite) TestInvalidFirstHash(c *C) {
payloads := []string{
"want 6ecf0ef2c2dffb796alberto2219af86ec6584e5\n",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid hash.*")
+ s.testDecoderErrorMatches(c, r, ".*invalid hash.*")
}
-func (s *SuiteDecoder) TestWantOK(c *C) {
+func (s *UlReqDecodeSuite) TestWantOK(c *C) {
payloads := []string{
"want 1111111111111111111111111111111111111111",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
c.Assert(ur.Wants, DeepEquals, []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111"),
})
}
-func testDecodeOK(c *C, payloads []string) *UlReq {
+func (s *UlReqDecodeSuite) testDecodeOK(c *C, payloads []string) *UlReq {
var buf bytes.Buffer
e := pktline.NewEncoder(&buf)
err := e.EncodeString(payloads...)
c.Assert(err, IsNil)
- ur := New()
- d := NewDecoder(&buf)
+ ur := NewUlReq()
+ d := NewUlReqDecoder(&buf)
err = d.Decode(ur)
c.Assert(err, IsNil)
@@ -87,12 +78,12 @@ func testDecodeOK(c *C, payloads []string) *UlReq {
return ur
}
-func (s *SuiteDecoder) TestWantWithCapabilities(c *C) {
+func (s *UlReqDecodeSuite) TestWantWithCapabilities(c *C) {
payloads := []string{
"want 1111111111111111111111111111111111111111 ofs-delta multi_ack",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
c.Assert(ur.Wants, DeepEquals, []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111")})
@@ -100,7 +91,7 @@ func (s *SuiteDecoder) TestWantWithCapabilities(c *C) {
c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
}
-func (s *SuiteDecoder) TestManyWantsNoCapabilities(c *C) {
+func (s *UlReqDecodeSuite) TestManyWantsNoCapabilities(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333",
"want 4444444444444444444444444444444444444444",
@@ -108,7 +99,7 @@ func (s *SuiteDecoder) TestManyWantsNoCapabilities(c *C) {
"want 2222222222222222222222222222222222222222",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expected := []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111"),
@@ -132,7 +123,7 @@ func (a byHash) Less(i, j int) bool {
return bytes.Compare(ii[:], jj[:]) < 0
}
-func (s *SuiteDecoder) TestManyWantsBadWant(c *C) {
+func (s *UlReqDecodeSuite) TestManyWantsBadWant(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333",
"want 4444444444444444444444444444444444444444",
@@ -141,10 +132,10 @@ func (s *SuiteDecoder) TestManyWantsBadWant(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected payload.*")
}
-func (s *SuiteDecoder) TestManyWantsInvalidHash(c *C) {
+func (s *UlReqDecodeSuite) TestManyWantsInvalidHash(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333",
"want 4444444444444444444444444444444444444444",
@@ -153,10 +144,10 @@ func (s *SuiteDecoder) TestManyWantsInvalidHash(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed hash.*")
+ s.testDecoderErrorMatches(c, r, ".*malformed hash.*")
}
-func (s *SuiteDecoder) TestManyWantsWithCapabilities(c *C) {
+func (s *UlReqDecodeSuite) TestManyWantsWithCapabilities(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"want 4444444444444444444444444444444444444444",
@@ -164,7 +155,7 @@ func (s *SuiteDecoder) TestManyWantsWithCapabilities(c *C) {
"want 2222222222222222222222222222222222222222",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expected := []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111"),
@@ -181,13 +172,13 @@ func (s *SuiteDecoder) TestManyWantsWithCapabilities(c *C) {
c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
}
-func (s *SuiteDecoder) TestSingleShallowSingleWant(c *C) {
+func (s *UlReqDecodeSuite) TestSingleShallowSingleWant(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expectedWants := []plumbing.Hash{
plumbing.NewHash("3333333333333333333333333333333333333333"),
@@ -204,7 +195,7 @@ func (s *SuiteDecoder) TestSingleShallowSingleWant(c *C) {
c.Assert(ur.Shallows, DeepEquals, expectedShallows)
}
-func (s *SuiteDecoder) TestSingleShallowManyWants(c *C) {
+func (s *UlReqDecodeSuite) TestSingleShallowManyWants(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"want 4444444444444444444444444444444444444444",
@@ -213,7 +204,7 @@ func (s *SuiteDecoder) TestSingleShallowManyWants(c *C) {
"shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expectedWants := []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111"),
@@ -235,7 +226,7 @@ func (s *SuiteDecoder) TestSingleShallowManyWants(c *C) {
c.Assert(ur.Shallows, DeepEquals, expectedShallows)
}
-func (s *SuiteDecoder) TestManyShallowSingleWant(c *C) {
+func (s *UlReqDecodeSuite) TestManyShallowSingleWant(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
@@ -244,7 +235,7 @@ func (s *SuiteDecoder) TestManyShallowSingleWant(c *C) {
"shallow dddddddddddddddddddddddddddddddddddddddd",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expectedWants := []plumbing.Hash{
plumbing.NewHash("3333333333333333333333333333333333333333"),
@@ -266,7 +257,7 @@ func (s *SuiteDecoder) TestManyShallowSingleWant(c *C) {
c.Assert(ur.Shallows, DeepEquals, expectedShallows)
}
-func (s *SuiteDecoder) TestManyShallowManyWants(c *C) {
+func (s *UlReqDecodeSuite) TestManyShallowManyWants(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"want 4444444444444444444444444444444444444444",
@@ -278,7 +269,7 @@ func (s *SuiteDecoder) TestManyShallowManyWants(c *C) {
"shallow dddddddddddddddddddddddddddddddddddddddd",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expectedWants := []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111"),
@@ -305,27 +296,27 @@ func (s *SuiteDecoder) TestManyShallowManyWants(c *C) {
c.Assert(ur.Shallows, DeepEquals, expectedShallows)
}
-func (s *SuiteDecoder) TestMalformedShallow(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedShallow(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"shalow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected payload.*")
}
-func (s *SuiteDecoder) TestMalformedShallowHash(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedShallowHash(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*malformed hash.*")
+ s.testDecoderErrorMatches(c, r, ".*malformed hash.*")
}
-func (s *SuiteDecoder) TestMalformedShallowManyShallows(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedShallowManyShallows(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
@@ -334,30 +325,30 @@ func (s *SuiteDecoder) TestMalformedShallowManyShallows(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected payload.*")
}
-func (s *SuiteDecoder) TestMalformedDeepenSpec(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedDeepenSpec(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"deepen-foo 34",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected deepen.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected deepen.*")
}
-func (s *SuiteDecoder) TestMalformedDeepenSingleWant(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedDeepenSingleWant(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"depth 32",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected payload.*")
}
-func (s *SuiteDecoder) TestMalformedDeepenMultiWant(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedDeepenMultiWant(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"want 2222222222222222222222222222222222222222",
@@ -365,10 +356,10 @@ func (s *SuiteDecoder) TestMalformedDeepenMultiWant(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected payload.*")
}
-func (s *SuiteDecoder) TestMalformedDeepenWithSingleShallow(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedDeepenWithSingleShallow(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"shallow 2222222222222222222222222222222222222222",
@@ -376,10 +367,10 @@ func (s *SuiteDecoder) TestMalformedDeepenWithSingleShallow(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected payload.*")
}
-func (s *SuiteDecoder) TestMalformedDeepenWithMultiShallow(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedDeepenWithMultiShallow(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"shallow 2222222222222222222222222222222222222222",
@@ -388,16 +379,16 @@ func (s *SuiteDecoder) TestMalformedDeepenWithMultiShallow(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected payload.*")
}
-func (s *SuiteDecoder) TestDeepenCommits(c *C) {
+func (s *UlReqDecodeSuite) TestDeepenCommits(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"deepen 1234",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
c.Assert(ur.Depth, FitsTypeOf, DepthCommits(0))
commits, ok := ur.Depth.(DepthCommits)
@@ -405,13 +396,13 @@ func (s *SuiteDecoder) TestDeepenCommits(c *C) {
c.Assert(int(commits), Equals, 1234)
}
-func (s *SuiteDecoder) TestDeepenCommitsInfiniteInplicit(c *C) {
+func (s *UlReqDecodeSuite) TestDeepenCommitsInfiniteInplicit(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"deepen 0",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
c.Assert(ur.Depth, FitsTypeOf, DepthCommits(0))
commits, ok := ur.Depth.(DepthCommits)
@@ -419,12 +410,12 @@ func (s *SuiteDecoder) TestDeepenCommitsInfiniteInplicit(c *C) {
c.Assert(int(commits), Equals, 0)
}
-func (s *SuiteDecoder) TestDeepenCommitsInfiniteExplicit(c *C) {
+func (s *UlReqDecodeSuite) TestDeepenCommitsInfiniteExplicit(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
c.Assert(ur.Depth, FitsTypeOf, DepthCommits(0))
commits, ok := ur.Depth.(DepthCommits)
@@ -432,33 +423,33 @@ func (s *SuiteDecoder) TestDeepenCommitsInfiniteExplicit(c *C) {
c.Assert(int(commits), Equals, 0)
}
-func (s *SuiteDecoder) TestMalformedDeepenCommits(c *C) {
+func (s *UlReqDecodeSuite) TestMalformedDeepenCommits(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"deepen -32",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*negative depth.*")
+ s.testDecoderErrorMatches(c, r, ".*negative depth.*")
}
-func (s *SuiteDecoder) TestDeepenCommitsEmpty(c *C) {
+func (s *UlReqDecodeSuite) TestDeepenCommitsEmpty(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"deepen ",
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*invalid syntax.*")
+ s.testDecoderErrorMatches(c, r, ".*invalid syntax.*")
}
-func (s *SuiteDecoder) TestDeepenSince(c *C) {
+func (s *UlReqDecodeSuite) TestDeepenSince(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"deepen-since 1420167845", // 2015-01-02T03:04:05+00:00
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expected := time.Date(2015, time.January, 2, 3, 4, 5, 0, time.UTC)
@@ -469,13 +460,13 @@ func (s *SuiteDecoder) TestDeepenSince(c *C) {
Commentf("obtained=%s\nexpected=%s", time.Time(since), expected))
}
-func (s *SuiteDecoder) TestDeepenReference(c *C) {
+func (s *UlReqDecodeSuite) TestDeepenReference(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"deepen-not refs/heads/master",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expected := "refs/heads/master"
@@ -485,7 +476,7 @@ func (s *SuiteDecoder) TestDeepenReference(c *C) {
c.Assert(string(reference), Equals, expected)
}
-func (s *SuiteDecoder) TestAll(c *C) {
+func (s *UlReqDecodeSuite) TestAll(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"want 4444444444444444444444444444444444444444",
@@ -498,7 +489,7 @@ func (s *SuiteDecoder) TestAll(c *C) {
"deepen 1234",
pktline.FlushString,
}
- ur := testDecodeOK(c, payloads)
+ ur := s.testDecodeOK(c, payloads)
expectedWants := []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111"),
@@ -529,7 +520,7 @@ func (s *SuiteDecoder) TestAll(c *C) {
c.Assert(int(commits), Equals, 1234)
}
-func (s *SuiteDecoder) TestExtraData(c *C) {
+func (s *UlReqDecodeSuite) TestExtraData(c *C) {
payloads := []string{
"want 3333333333333333333333333333333333333333 ofs-delta multi_ack",
"deepen 32",
@@ -537,5 +528,5 @@ func (s *SuiteDecoder) TestExtraData(c *C) {
pktline.FlushString,
}
r := toPktLines(c, payloads)
- testDecoderErrorMatches(c, r, ".*unexpected payload.*")
+ s.testDecoderErrorMatches(c, r, ".*unexpected payload.*")
}
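The suite above now drives the decoder through its renamed entry points: NewUlReq allocates the request and NewUlReqDecoder wraps any io.Reader. As a minimal sketch of that call sequence outside the gocheck harness, using only identifiers that appear in this diff (the error handling is illustrative, not part of the patch):

package main

import (
    "bytes"
    "fmt"

    "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
    "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
)

func main() {
    // Build a tiny upload-request the same way toPktLines does in the tests:
    // one want line followed by a flush-pkt.
    var buf bytes.Buffer
    e := pktline.NewEncoder(&buf)
    if err := e.EncodeString(
        "want 1111111111111111111111111111111111111111\n",
        pktline.FlushString,
    ); err != nil {
        fmt.Println("encode:", err)
        return
    }

    // Decode it with the relocated API.
    ur := packp.NewUlReq()
    d := packp.NewUlReqDecoder(&buf)
    if err := d.Decode(ur); err != nil {
        fmt.Println("decode:", err)
        return
    }
    for _, h := range ur.Wants {
        fmt.Println(h) // the single requested hash
    }
}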
diff --git a/plumbing/format/packp/ulreq/encoder.go b/plumbing/protocol/packp/ulreq_encoder.go
index 1264e0e..9ebc4b5 100644
--- a/plumbing/format/packp/ulreq/encoder.go
+++ b/plumbing/protocol/packp/ulreq_encoder.go
@@ -1,4 +1,4 @@
-package ulreq
+package packp
import (
"fmt"
@@ -7,20 +7,20 @@ import (
"time"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
)
-// An Encoder writes UlReq values to an output stream.
-type Encoder struct {
+// An UlReqEncoder writes UlReq values to an output stream.
+type UlReqEncoder struct {
pe *pktline.Encoder // where to write the encoded data
data *UlReq // the data to encode
sortedWants []string
err error // sticky error
}
-// NewEncoder returns a new encoder that writes to w.
-func NewEncoder(w io.Writer) *Encoder {
- return &Encoder{
+// NewUlReqEncoder returns a new encoder that writes to w.
+func NewUlReqEncoder(w io.Writer) *UlReqEncoder {
+ return &UlReqEncoder{
pe: pktline.NewEncoder(w),
}
}
@@ -30,7 +30,7 @@ func NewEncoder(w io.Writer) *Encoder {
// All the payloads will end with a newline character. Wants and
// shallows are sorted alphabetically. A depth of 0 means no depth
// request is sent.
-func (e *Encoder) Encode(v *UlReq) error {
+func (e *UlReqEncoder) Encode(v *UlReq) error {
if len(v.Wants) == 0 {
return fmt.Errorf("empty wants provided")
}
@@ -38,15 +38,13 @@ func (e *Encoder) Encode(v *UlReq) error {
e.data = v
e.sortedWants = sortHashes(v.Wants)
- for state := encodeFirstWant; state != nil; {
- state = state(e)
+ for state := e.encodeFirstWant; state != nil; {
+ state = state()
}
return e.err
}
-type encoderStateFn func(*Encoder) encoderStateFn
-
func sortHashes(list []plumbing.Hash) []string {
sorted := make([]string, len(list))
for i, hash := range list {
@@ -57,7 +55,7 @@ func sortHashes(list []plumbing.Hash) []string {
return sorted
}
-func encodeFirstWant(e *Encoder) encoderStateFn {
+func (e *UlReqEncoder) encodeFirstWant() stateFn {
var err error
if e.data.Capabilities.IsEmpty() {
err = e.pe.Encodef("want %s\n", e.sortedWants[0])
@@ -74,10 +72,10 @@ func encodeFirstWant(e *Encoder) encoderStateFn {
return nil
}
- return encodeAditionalWants
+ return e.encodeAditionalWants
}
-func encodeAditionalWants(e *Encoder) encoderStateFn {
+func (e *UlReqEncoder) encodeAditionalWants() stateFn {
for _, w := range e.sortedWants[1:] {
if err := e.pe.Encodef("want %s\n", w); err != nil {
e.err = fmt.Errorf("encoding want %q: %s", w, err)
@@ -85,10 +83,10 @@ func encodeAditionalWants(e *Encoder) encoderStateFn {
}
}
- return encodeShallows
+ return e.encodeShallows
}
-func encodeShallows(e *Encoder) encoderStateFn {
+func (e *UlReqEncoder) encodeShallows() stateFn {
sorted := sortHashes(e.data.Shallows)
for _, s := range sorted {
if err := e.pe.Encodef("shallow %s\n", s); err != nil {
@@ -97,10 +95,10 @@ func encodeShallows(e *Encoder) encoderStateFn {
}
}
- return encodeDepth
+ return e.encodeDepth
}
-func encodeDepth(e *Encoder) encoderStateFn {
+func (e *UlReqEncoder) encodeDepth() stateFn {
switch depth := e.data.Depth.(type) {
case DepthCommits:
if depth != 0 {
@@ -127,10 +125,10 @@ func encodeDepth(e *Encoder) encoderStateFn {
return nil
}
- return encodeFlush
+ return e.encodeFlush
}
-func encodeFlush(e *Encoder) encoderStateFn {
+func (e *UlReqEncoder) encodeFlush() stateFn {
if err := e.pe.Flush(); err != nil {
e.err = fmt.Errorf("encoding flush-pkt: %s", err)
return nil
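Beyond the renames, this file changes shape: the package-level encoderStateFn type (a func taking *Encoder) disappears, and each step becomes a method on *UlReqEncoder that returns the shared stateFn type defined elsewhere in the package, so the loop in Encode can simply call method values. A self-contained sketch of that idiom, with hypothetical names (greeter, hello, goodbye are illustrations, not part of go-git):

package main

import "fmt"

// stateFn mirrors the shared type the encoder now returns from each step:
// a step is a niladic function that yields the next step, or nil to stop.
type stateFn func() stateFn

type greeter struct {
    name string
    err  error // sticky error, kept on the receiver as in UlReqEncoder
}

func (g *greeter) hello() stateFn {
    fmt.Println("hello,", g.name)
    return g.goodbye // a method value: the receiver travels with it
}

func (g *greeter) goodbye() stateFn {
    fmt.Println("goodbye,", g.name)
    return nil // nil ends the state machine
}

func main() {
    g := &greeter{name: "packp"}
    for state := g.hello; state != nil; {
        state = state()
    }
}

The payoff is that every step sees the encoder's fields (including its sticky err) through the receiver instead of an explicit *Encoder argument, which is exactly what the rewritten loop in Encode relies on.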
diff --git a/plumbing/format/packp/ulreq/encoder_test.go b/plumbing/protocol/packp/ulreq_encoder_test.go
index 44c6d26..fb83653 100644
--- a/plumbing/format/packp/ulreq/encoder_test.go
+++ b/plumbing/protocol/packp/ulreq_encoder_test.go
@@ -1,33 +1,22 @@
-package ulreq
+package packp
import (
"bytes"
"time"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
)
-type SuiteEncoder struct{}
+type UlReqEncodeSuite struct{}
-var _ = Suite(&SuiteEncoder{})
+var _ = Suite(&UlReqEncodeSuite{})
-// returns a byte slice with the pkt-lines for the given payloads.
-func pktlines(c *C, payloads ...string) []byte {
+func testUlReqEncode(c *C, ur *UlReq, expectedPayloads []string) {
var buf bytes.Buffer
- e := pktline.NewEncoder(&buf)
-
- err := e.EncodeString(payloads...)
- c.Assert(err, IsNil, Commentf("building pktlines for %v\n", payloads))
-
- return buf.Bytes()
-}
-
-func testEncode(c *C, ur *UlReq, expectedPayloads []string) {
- var buf bytes.Buffer
- e := NewEncoder(&buf)
+ e := NewUlReqEncoder(&buf)
err := e.Encode(ur)
c.Assert(err, IsNil)
@@ -40,23 +29,23 @@ func testEncode(c *C, ur *UlReq, expectedPayloads []string) {
c.Assert(obtained, DeepEquals, expected, comment)
}
-func testEncodeError(c *C, ur *UlReq, expectedErrorRegEx string) {
+func testUlReqEncodeError(c *C, ur *UlReq, expectedErrorRegEx string) {
var buf bytes.Buffer
- e := NewEncoder(&buf)
+ e := NewUlReqEncoder(&buf)
err := e.Encode(ur)
c.Assert(err, ErrorMatches, expectedErrorRegEx)
}
-func (s *SuiteEncoder) TestZeroValue(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestZeroValue(c *C) {
+ ur := NewUlReq()
expectedErrorRegEx := ".*empty wants.*"
- testEncodeError(c, ur, expectedErrorRegEx)
+ testUlReqEncodeError(c, ur, expectedErrorRegEx)
}
-func (s *SuiteEncoder) TestOneWant(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestOneWant(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
expected := []string{
@@ -64,11 +53,11 @@ func (s *SuiteEncoder) TestOneWant(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestOneWantWithCapabilities(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestOneWantWithCapabilities(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
ur.Capabilities.Add("multi_ack")
@@ -81,11 +70,11 @@ func (s *SuiteEncoder) TestOneWantWithCapabilities(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestWants(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestWants(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("4444444444444444444444444444444444444444"))
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
ur.Wants = append(ur.Wants, plumbing.NewHash("3333333333333333333333333333333333333333"))
@@ -101,11 +90,11 @@ func (s *SuiteEncoder) TestWants(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestWantsWithCapabilities(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestWantsWithCapabilities(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("4444444444444444444444444444444444444444"))
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
ur.Wants = append(ur.Wants, plumbing.NewHash("3333333333333333333333333333333333333333"))
@@ -127,11 +116,11 @@ func (s *SuiteEncoder) TestWantsWithCapabilities(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestShallow(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestShallow(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
ur.Capabilities.Add("multi_ack")
ur.Shallows = append(ur.Shallows, plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))
@@ -142,11 +131,11 @@ func (s *SuiteEncoder) TestShallow(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestManyShallows(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestManyShallows(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
ur.Capabilities.Add("multi_ack")
ur.Shallows = append(ur.Shallows, plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"))
@@ -163,11 +152,11 @@ func (s *SuiteEncoder) TestManyShallows(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestDepthCommits(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestDepthCommits(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
ur.Depth = DepthCommits(1234)
@@ -177,11 +166,11 @@ func (s *SuiteEncoder) TestDepthCommits(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestDepthSinceUTC(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestDepthSinceUTC(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
since := time.Date(2015, time.January, 2, 3, 4, 5, 0, time.UTC)
ur.Depth = DepthSince(since)
@@ -192,11 +181,11 @@ func (s *SuiteEncoder) TestDepthSinceUTC(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestDepthSinceNonUTC(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestDepthSinceNonUTC(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
berlin, err := time.LoadLocation("Europe/Berlin")
c.Assert(err, IsNil)
@@ -211,11 +200,11 @@ func (s *SuiteEncoder) TestDepthSinceNonUTC(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestDepthReference(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestDepthReference(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
ur.Depth = DepthReference("refs/heads/feature-foo")
@@ -225,11 +214,11 @@ func (s *SuiteEncoder) TestDepthReference(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
-func (s *SuiteEncoder) TestAll(c *C) {
- ur := New()
+func (s *UlReqEncodeSuite) TestAll(c *C) {
+ ur := NewUlReq()
ur.Wants = append(ur.Wants, plumbing.NewHash("4444444444444444444444444444444444444444"))
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
ur.Wants = append(ur.Wants, plumbing.NewHash("3333333333333333333333333333333333333333"))
@@ -264,5 +253,5 @@ func (s *SuiteEncoder) TestAll(c *C) {
pktline.FlushString,
}
- testEncode(c, ur, expected)
+ testUlReqEncode(c, ur, expected)
}
diff --git a/plumbing/format/packp/ulreq/ulreq_test.go b/plumbing/protocol/packp/ulreq_test.go
index 06963ff..19b6dd0 100644
--- a/plumbing/format/packp/ulreq/ulreq_test.go
+++ b/plumbing/protocol/packp/ulreq_test.go
@@ -1,23 +1,18 @@
-package ulreq
+package packp
import (
"fmt"
"os"
"strings"
- "testing"
"time"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
-
- . "gopkg.in/check.v1"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
)
-func Test(t *testing.T) { TestingT(t) }
-
-func ExampleEncoder_Encode() {
+func ExampleUlReqEncoder_Encode() {
// Create an empty UlReq with the contents you want...
- ur := New()
+ ur := NewUlReq()
// Add a couple of wants
ur.Wants = append(ur.Wants, plumbing.NewHash("3333333333333333333333333333333333333333"))
@@ -37,7 +32,7 @@ func ExampleEncoder_Encode() {
ur.Depth = DepthSince(since)
// Create a new Encode for the stdout...
- e := NewEncoder(os.Stdout)
+ e := NewUlReqEncoder(os.Stdout)
// ...and encode the upload-request to it.
_ = e.Encode(ur) // ignoring errors for brevity
// Output:
@@ -50,7 +45,7 @@ func ExampleEncoder_Encode() {
// 0000
}
-func ExampleDecoder_Decode() {
+func ExampleUlReqDecoder_Decode() {
// Here is a raw advertised-ref message.
raw := "" +
"005bwant 1111111111111111111111111111111111111111 ofs-delta sysref=HEAD:/refs/heads/master\n" +
@@ -65,10 +60,10 @@ func ExampleDecoder_Decode() {
input := strings.NewReader(raw)
// Create the Decoder reading from our input.
- d := NewDecoder(input)
+ d := NewUlReqDecoder(input)
// Decode the input into a newly allocated UlReq value.
- ur := New()
+ ur := NewUlReq()
_ = d.Decode(ur) // error check ignored for brevity
// Do something interesting with the UlReq, e.g. print its contents.
diff --git a/plumbing/transport/fetch_pack.go b/plumbing/transport/fetch_pack.go
index 5346e9d..14d3e19 100644
--- a/plumbing/transport/fetch_pack.go
+++ b/plumbing/transport/fetch_pack.go
@@ -8,14 +8,13 @@ import (
"strings"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
"gopkg.in/src-d/go-git.v4/plumbing/storer"
"gopkg.in/src-d/go-git.v4/storage/memory"
)
-//TODO: Replace this by advrefs.AdvRefs.
+//TODO: Replace this by packp.AdvRefs.
type UploadPackInfo struct {
Capabilities *packp.Capabilities
Refs memory.ReferenceStorage
@@ -29,10 +28,10 @@ func NewUploadPackInfo() *UploadPackInfo {
}
func (i *UploadPackInfo) Decode(r io.Reader) error {
- d := advrefs.NewDecoder(r)
- ar := advrefs.New()
+ d := packp.NewAdvRefsDecoder(r)
+ ar := packp.NewAdvRefs()
if err := d.Decode(ar); err != nil {
- if err == advrefs.ErrEmpty {
+ if err == packp.ErrEmpty {
return err
}
return plumbing.NewUnexpectedError(err)
@@ -47,7 +46,7 @@ func (i *UploadPackInfo) Decode(r io.Reader) error {
return nil
}
-func (i *UploadPackInfo) addRefs(ar *advrefs.AdvRefs) error {
+func (i *UploadPackInfo) addRefs(ar *packp.AdvRefs) error {
for name, hash := range ar.References {
ref := plumbing.NewReferenceFromStrings(name, hash.String())
i.Refs.SetReference(ref)
@@ -56,7 +55,7 @@ func (i *UploadPackInfo) addRefs(ar *advrefs.AdvRefs) error {
return i.addSymbolicRefs(ar)
}
-func (i *UploadPackInfo) addSymbolicRefs(ar *advrefs.AdvRefs) error {
+func (i *UploadPackInfo) addSymbolicRefs(ar *packp.AdvRefs) error {
if !hasSymrefs(ar) {
return nil
}
@@ -76,7 +75,7 @@ func (i *UploadPackInfo) addSymbolicRefs(ar *advrefs.AdvRefs) error {
return nil
}
-func hasSymrefs(ar *advrefs.AdvRefs) bool {
+func hasSymrefs(ar *packp.AdvRefs) bool {
return ar.Capabilities.Supports("symref")
}
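After the move, the advertised-refs machinery lives under protocol/packp as AdvRefs, NewAdvRefs and NewAdvRefsDecoder, and the sentinel error is packp.ErrEmpty. A compact sketch of the decode path that UploadPackInfo.Decode wraps above (the helper name decodeAdvRefs and the stdin plumbing are illustrative only):

package main

import (
    "fmt"
    "io"
    "os"

    "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
)

// decodeAdvRefs shows the call sequence used above: allocate an AdvRefs,
// wrap the reader in a decoder, and let packp.ErrEmpty surface unchanged.
func decodeAdvRefs(r io.Reader) (*packp.AdvRefs, error) {
    ar := packp.NewAdvRefs()
    d := packp.NewAdvRefsDecoder(r)
    if err := d.Decode(ar); err != nil {
        return nil, err // may be packp.ErrEmpty for an empty advertisement
    }
    return ar, nil
}

func main() {
    ar, err := decodeAdvRefs(os.Stdin)
    if err == packp.ErrEmpty {
        fmt.Println("remote advertised no references")
        return
    }
    if err != nil {
        fmt.Fprintln(os.Stderr, "advrefs:", err)
        return
    }
    for name, hash := range ar.References {
        fmt.Println(hash, name) // the same data addRefs feeds into ReferenceStorage
    }
}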
diff --git a/plumbing/transport/fetch_pack_test.go b/plumbing/transport/fetch_pack_test.go
index 16112a9..fc77fe2 100644
--- a/plumbing/transport/fetch_pack_test.go
+++ b/plumbing/transport/fetch_pack_test.go
@@ -5,7 +5,7 @@ import (
"encoding/base64"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
. "gopkg.in/check.v1"
)
@@ -59,7 +59,7 @@ func (s *UploadPackSuite) TestUploadPackInfoEmpty(c *C) {
i := NewUploadPackInfo()
err := i.Decode(b)
- c.Assert(err, Equals, advrefs.ErrEmpty)
+ c.Assert(err, Equals, packp.ErrEmpty)
}
func (s *UploadPackSuite) TestUploadPackEncode(c *C) {
diff --git a/plumbing/transport/http/fetch_pack.go b/plumbing/transport/http/fetch_pack.go
index 0becb7b..29e385b 100644
--- a/plumbing/transport/http/fetch_pack.go
+++ b/plumbing/transport/http/fetch_pack.go
@@ -8,8 +8,8 @@ import (
"strings"
"gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
"gopkg.in/src-d/go-git.v4/plumbing/transport"
"gopkg.in/src-d/go-git.v4/utils/ioutil"
)
@@ -64,7 +64,7 @@ func (s *fetchPackSession) AdvertisedReferences() (*transport.UploadPackInfo,
i := transport.NewUploadPackInfo()
if err := i.Decode(res.Body); err != nil {
- if err == advrefs.ErrEmpty {
+ if err == packp.ErrEmpty {
err = transport.ErrEmptyRemoteRepository
}
diff --git a/plumbing/transport/ssh/fetch_pack.go b/plumbing/transport/ssh/fetch_pack.go
index b43160a..decd9c4 100644
--- a/plumbing/transport/ssh/fetch_pack.go
+++ b/plumbing/transport/ssh/fetch_pack.go
@@ -7,9 +7,8 @@ import (
"fmt"
"io"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/advrefs"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/pktline"
- "gopkg.in/src-d/go-git.v4/plumbing/format/packp/ulreq"
+ "gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
"gopkg.in/src-d/go-git.v4/plumbing/transport"
"gopkg.in/src-d/go-git.v4/utils/ioutil"
@@ -49,7 +48,7 @@ func (s *fetchPackSession) AdvertisedReferences() (*transport.UploadPackInfo, er
i := transport.NewUploadPackInfo()
if err := i.Decode(s.stdout); err != nil {
- if err != advrefs.ErrEmpty {
+ if err != packp.ErrEmpty {
return nil, err
}
@@ -188,10 +187,10 @@ func fetchPack(w io.WriteCloser, r io.Reader,
}
func sendUlReq(w io.Writer, req *transport.UploadPackRequest) error {
- ur := ulreq.New()
+ ur := packp.NewUlReq()
ur.Wants = req.Wants
- ur.Depth = ulreq.DepthCommits(req.Depth)
- e := ulreq.NewEncoder(w)
+ ur.Depth = packp.DepthCommits(req.Depth)
+ e := packp.NewUlReqEncoder(w)
return e.Encode(ur)
}
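Seen end to end, the ssh transport now builds its upload-request entirely from the relocated packp package: the same three calls sendUlReq makes above, flattened into a standalone sketch (the hash and depth values are made up for illustration, and os.Stdout stands in for the ssh session's stdin):

package main

import (
    "os"

    "gopkg.in/src-d/go-git.v4/plumbing"
    "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
)

func main() {
    ur := packp.NewUlReq()
    ur.Wants = append(ur.Wants,
        plumbing.NewHash("3333333333333333333333333333333333333333"))
    ur.Depth = packp.DepthCommits(1) // a shallow request of depth 1

    // Encode the request as pkt-lines; sendUlReq writes to the remote's
    // stdin instead of os.Stdout.
    e := packp.NewUlReqEncoder(os.Stdout)
    _ = e.Encode(ur) // error check omitted for brevity, as in the Example above
}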