path: root/plumbing/protocol/packp
author     Máximo Cuadros <mcuadros@gmail.com>    2016-11-29 15:11:00 +0100
committer  GitHub <noreply@github.com>            2016-11-29 15:11:00 +0100
commit     ef1a0579fbc6aac510313ce073d1dd8fc8a9202b (patch)
tree       6d41484c70cef0ec6b7582e2ac15b9daccf0e54e /plumbing/protocol/packp
parent     47007c70c5a696472576a522cd0e265a777f97a8 (diff)
download   go-git-ef1a0579fbc6aac510313ce073d1dd8fc8a9202b.tar.gz
protocol/packp: capabilities new Capability entity and List struct, test improvements (#144)
* protocol/packp: capabilities new Capability entity and List struct, test improvements
* etc: example cloud-config file
* removing sorting from List.String
Diffstat (limited to 'plumbing/protocol/packp')
-rw-r--r--  plumbing/protocol/packp/advrefs.go               |  15
-rw-r--r--  plumbing/protocol/packp/advrefs_decode.go        |   5
-rw-r--r--  plumbing/protocol/packp/advrefs_decode_test.go   | 365
-rw-r--r--  plumbing/protocol/packp/advrefs_encode.go        |   5
-rw-r--r--  plumbing/protocol/packp/advrefs_encode_test.go   |  25
-rw-r--r--  plumbing/protocol/packp/advrefs_test.go          |  21
-rw-r--r--  plumbing/protocol/packp/capabilities.go          | 136
-rw-r--r--  plumbing/protocol/packp/capabilities_test.go     |  42
-rw-r--r--  plumbing/protocol/packp/capability/capability.go | 249
-rw-r--r--  plumbing/protocol/packp/capability/list.go       | 161
-rw-r--r--  plumbing/protocol/packp/capability/list_test.go  | 141
-rw-r--r--  plumbing/protocol/packp/common.go                |  14
-rw-r--r--  plumbing/protocol/packp/ulreq.go                 |   5
-rw-r--r--  plumbing/protocol/packp/ulreq_decode.go          |   6
-rw-r--r--  plumbing/protocol/packp/ulreq_decode_test.go     |  30
-rw-r--r--  plumbing/protocol/packp/ulreq_encode.go          |   1
-rw-r--r--  plumbing/protocol/packp/ulreq_encode_test.go     |  41
-rw-r--r--  plumbing/protocol/packp/ulreq_test.go            |  11
18 files changed, 806 insertions, 467 deletions
diff --git a/plumbing/protocol/packp/advrefs.go b/plumbing/protocol/packp/advrefs.go
index b36b180..a0587ab 100644
--- a/plumbing/protocol/packp/advrefs.go
+++ b/plumbing/protocol/packp/advrefs.go
@@ -5,14 +5,11 @@ import (
"strings"
"gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
"gopkg.in/src-d/go-git.v4/plumbing/storer"
"gopkg.in/src-d/go-git.v4/storage/memory"
)
-const (
- symref = "symref"
-)
-
// AdvRefs values represent the information transmitted on an
// advertised-refs message. Values from this type are not zero-value
// safe, use the New function instead.
@@ -31,7 +28,7 @@ const (
type AdvRefs struct {
Prefix [][]byte // payloads of the prefix
Head *plumbing.Hash
- Capabilities *Capabilities
+ Capabilities *capability.List
References map[string]plumbing.Hash
Peeled map[string]plumbing.Hash
Shallows []plumbing.Hash
@@ -41,7 +38,7 @@ type AdvRefs struct {
func NewAdvRefs() *AdvRefs {
return &AdvRefs{
Prefix: [][]byte{},
- Capabilities: NewCapabilities(),
+ Capabilities: capability.NewList(),
References: make(map[string]plumbing.Hash),
Peeled: make(map[string]plumbing.Hash),
Shallows: []plumbing.Hash{},
@@ -52,7 +49,7 @@ func (a *AdvRefs) AddReference(r *plumbing.Reference) error {
switch r.Type() {
case plumbing.SymbolicReference:
v := fmt.Sprintf("%s:%s", r.Name().String(), r.Target().String())
- a.Capabilities.Add(symref, v)
+ a.Capabilities.Add(capability.SymRef, v)
case plumbing.HashReference:
a.References[r.Name().String()] = r.Hash()
default:
@@ -87,7 +84,7 @@ func addSymbolicRefs(s storer.ReferenceStorer, ar *AdvRefs) error {
return nil
}
- for _, symref := range ar.Capabilities.Get(symref).Values {
+ for _, symref := range ar.Capabilities.Get(capability.SymRef) {
chunks := strings.Split(symref, ":")
if len(chunks) != 2 {
err := fmt.Errorf("bad number of `:` in symref value (%q)", symref)
@@ -105,5 +102,5 @@ func addSymbolicRefs(s storer.ReferenceStorer, ar *AdvRefs) error {
}
func hasSymrefs(ar *AdvRefs) bool {
- return ar.Capabilities.Supports(symref)
+ return ar.Capabilities.Supports(capability.SymRef)
}
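
A minimal usage sketch of the updated AdvRefs API (not part of the commit; it assumes plumbing.NewSymbolicReference from the existing plumbing package, and the ref names are illustrative only):

    package main

    import (
    	"fmt"

    	"gopkg.in/src-d/go-git.v4/plumbing"
    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
    )

    func main() {
    	ar := packp.NewAdvRefs()

    	// A symbolic reference is now stored under capability.SymRef
    	// instead of the removed package-local symref constant.
    	head := plumbing.NewSymbolicReference("HEAD", "refs/heads/master")
    	if err := ar.AddReference(head); err != nil {
    		fmt.Println("add reference:", err)
    		return
    	}

    	fmt.Println(ar.Capabilities.Supports(capability.SymRef)) // true
    	fmt.Println(ar.Capabilities.Get(capability.SymRef))      // [HEAD:refs/heads/master]
    }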
diff --git a/plumbing/protocol/packp/advrefs_decode.go b/plumbing/protocol/packp/advrefs_decode.go
index 8d37066..696bbae 100644
--- a/plumbing/protocol/packp/advrefs_decode.go
+++ b/plumbing/protocol/packp/advrefs_decode.go
@@ -201,9 +201,8 @@ func decodeCaps(p *advRefsDecoder) decoderStateFn {
return decodeOtherRefs
}
- for _, c := range bytes.Split(p.line, sp) {
- name, values := readCapability(c)
- p.data.Capabilities.Add(name, values...)
+ if err := p.data.Capabilities.Decode(p.line); err != nil {
+ p.error("invalid capabilities: %s", err)
}
return decodeOtherRefs
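
Because decodeCaps now delegates to capability.List.Decode, tokens the capability package does not recognise are rejected instead of being stored verbatim. A small sketch of that behaviour, using only the new package (inputs are illustrative):

    package main

    import (
    	"fmt"

    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
    )

    func main() {
    	// A well-formed capability line decodes without error.
    	l := capability.NewList()
    	if err := l.Decode([]byte("ofs-delta agent=git/2.10.0")); err != nil {
    		fmt.Println("decode:", err)
    		return
    	}
    	fmt.Println(l.Supports(capability.OFSDelta)) // true

    	// An unknown capability name is now rejected.
    	err := capability.NewList().Decode([]byte("not-a-capability"))
    	fmt.Println(err == capability.ErrUnknownCapability) // true
    }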
diff --git a/plumbing/protocol/packp/advrefs_decode_test.go b/plumbing/protocol/packp/advrefs_decode_test.go
index 2b3da72..82a05a4 100644
--- a/plumbing/protocol/packp/advrefs_decode_test.go
+++ b/plumbing/protocol/packp/advrefs_decode_test.go
@@ -7,6 +7,7 @@ import (
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
. "gopkg.in/check.v1"
)
@@ -176,82 +177,70 @@ func (s *AdvRefsDecodeSuite) TestNoCaps(c *C) {
}
func (s *AdvRefsDecodeSuite) TestCaps(c *C) {
+ type entry struct {
+ Name capability.Capability
+ Values []string
+ }
+
for _, test := range [...]struct {
input []string
- capabilities []Capability
- }{
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
- pktline.FlushString,
- },
- capabilities: []Capability{},
+ capabilities []entry
+ }{{
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00",
+ pktline.FlushString,
},
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n",
- pktline.FlushString,
- },
- capabilities: []Capability{},
+ capabilities: []entry{},
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n",
+ pktline.FlushString,
},
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta",
- pktline.FlushString,
- },
- capabilities: []Capability{
- {
- Name: "ofs-delta",
- Values: []string(nil),
- },
- },
+ capabilities: []entry{},
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta",
+ pktline.FlushString,
},
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack",
- pktline.FlushString,
- },
- capabilities: []Capability{
- {Name: "ofs-delta", Values: []string(nil)},
- {Name: "multi_ack", Values: []string(nil)},
+ capabilities: []entry{
+ {
+ Name: capability.OFSDelta,
+ Values: []string(nil),
},
},
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack\n",
- pktline.FlushString,
- },
- capabilities: []Capability{
- {Name: "ofs-delta", Values: []string(nil)},
- {Name: "multi_ack", Values: []string(nil)},
- },
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack",
+ pktline.FlushString,
},
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar\n",
- pktline.FlushString,
- },
- capabilities: []Capability{
- {Name: "symref", Values: []string{"HEAD:refs/heads/master"}},
- {Name: "agent", Values: []string{"foo=bar"}},
- },
+ capabilities: []entry{
+ {Name: capability.OFSDelta, Values: []string(nil)},
+ {Name: capability.MultiACK, Values: []string(nil)},
},
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar agent=new-agent\n",
- pktline.FlushString,
- },
- capabilities: []Capability{
- {Name: "symref", Values: []string{"HEAD:refs/heads/master"}},
- {Name: "agent", Values: []string{"foo=bar", "new-agent"}},
- },
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta multi_ack\n",
+ pktline.FlushString,
},
- } {
+ capabilities: []entry{
+ {Name: capability.OFSDelta, Values: []string(nil)},
+ {Name: capability.MultiACK, Values: []string(nil)},
+ },
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:refs/heads/master agent=foo=bar\n",
+ pktline.FlushString,
+ },
+ capabilities: []entry{
+ {Name: capability.SymRef, Values: []string{"HEAD:refs/heads/master"}},
+ {Name: capability.Agent, Values: []string{"foo=bar"}},
+ },
+ }} {
ar := s.testDecodeOK(c, test.input)
for _, fixCap := range test.capabilities {
c.Assert(ar.Capabilities.Supports(fixCap.Name), Equals, true,
Commentf("input = %q, capability = %q", test.input, fixCap.Name))
- c.Assert(ar.Capabilities.Get(fixCap.Name).Values, DeepEquals, fixCap.Values,
+ c.Assert(ar.Capabilities.Get(fixCap.Name), DeepEquals, fixCap.Values,
Commentf("input = %q, capability = %q", test.input, fixCap.Name))
}
}
@@ -260,7 +249,7 @@ func (s *AdvRefsDecodeSuite) TestCaps(c *C) {
func (s *AdvRefsDecodeSuite) TestWithPrefix(c *C) {
payloads := []string{
"# this is a prefix\n",
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00foo\n",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta\n",
pktline.FlushString,
}
ar := s.testDecodeOK(c, payloads)
@@ -272,7 +261,7 @@ func (s *AdvRefsDecodeSuite) TestWithPrefixAndFlush(c *C) {
payloads := []string{
"# this is a prefix\n",
pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00foo\n",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta\n",
pktline.FlushString,
}
ar := s.testDecodeOK(c, payloads)
@@ -286,98 +275,96 @@ func (s *AdvRefsDecodeSuite) TestOtherRefs(c *C) {
input []string
references map[string]plumbing.Hash
peeled map[string]plumbing.Hash
- }{
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- pktline.FlushString,
- },
- references: make(map[string]plumbing.Hash),
- peeled: make(map[string]plumbing.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: make(map[string]plumbing.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: make(map[string]plumbing.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- "2222222222222222222222222222222222222222 ref/bar",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- "ref/bar": plumbing.NewHash("2222222222222222222222222222222222222222"),
- },
- peeled: make(map[string]plumbing.Hash),
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo^{}\n",
- pktline.FlushString,
- },
- references: make(map[string]plumbing.Hash),
- peeled: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- },
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "1111111111111111111111111111111111111111 ref/foo\n",
- "2222222222222222222222222222222222222222 ref/bar^{}",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
- },
- peeled: map[string]plumbing.Hash{
- "ref/bar": plumbing.NewHash("2222222222222222222222222222222222222222"),
- },
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "51b8b4fb32271d39fbdd760397406177b2b0fd36 refs/pull/10/head\n",
- "02b5a6031ba7a8cbfde5d65ff9e13ecdbc4a92ca refs/pull/100/head\n",
- "c284c212704c43659bf5913656b8b28e32da1621 refs/pull/100/merge\n",
- "3d6537dce68c8b7874333a1720958bd8db3ae8ca refs/pull/101/merge\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{}\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- pktline.FlushString,
- },
- references: map[string]plumbing.Hash{
- "refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
- "refs/pull/10/head": plumbing.NewHash("51b8b4fb32271d39fbdd760397406177b2b0fd36"),
- "refs/pull/100/head": plumbing.NewHash("02b5a6031ba7a8cbfde5d65ff9e13ecdbc4a92ca"),
- "refs/pull/100/merge": plumbing.NewHash("c284c212704c43659bf5913656b8b28e32da1621"),
- "refs/pull/101/merge": plumbing.NewHash("3d6537dce68c8b7874333a1720958bd8db3ae8ca"),
- "refs/tags/v2.6.11": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- "refs/tags/v2.6.11-tree": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
- },
- peeled: map[string]plumbing.Hash{
- "refs/tags/v2.6.11": plumbing.NewHash("c39ae07f393806ccf406ef966e9a15afc43cc36a"),
- "refs/tags/v2.6.11-tree": plumbing.NewHash("c39ae07f393806ccf406ef966e9a15afc43cc36a"),
- },
+ }{{
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ pktline.FlushString,
+ },
+ references: make(map[string]plumbing.Hash),
+ peeled: make(map[string]plumbing.Hash),
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "1111111111111111111111111111111111111111 ref/foo",
+ pktline.FlushString,
+ },
+ references: map[string]plumbing.Hash{
+ "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
+ },
+ peeled: make(map[string]plumbing.Hash),
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "1111111111111111111111111111111111111111 ref/foo\n",
+ pktline.FlushString,
+ },
+ references: map[string]plumbing.Hash{
+ "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
+ },
+ peeled: make(map[string]plumbing.Hash),
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "1111111111111111111111111111111111111111 ref/foo\n",
+ "2222222222222222222222222222222222222222 ref/bar",
+ pktline.FlushString,
+ },
+ references: map[string]plumbing.Hash{
+ "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
+ "ref/bar": plumbing.NewHash("2222222222222222222222222222222222222222"),
+ },
+ peeled: make(map[string]plumbing.Hash),
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "1111111111111111111111111111111111111111 ref/foo^{}\n",
+ pktline.FlushString,
+ },
+ references: make(map[string]plumbing.Hash),
+ peeled: map[string]plumbing.Hash{
+ "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
},
- } {
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "1111111111111111111111111111111111111111 ref/foo\n",
+ "2222222222222222222222222222222222222222 ref/bar^{}",
+ pktline.FlushString,
+ },
+ references: map[string]plumbing.Hash{
+ "ref/foo": plumbing.NewHash("1111111111111111111111111111111111111111"),
+ },
+ peeled: map[string]plumbing.Hash{
+ "ref/bar": plumbing.NewHash("2222222222222222222222222222222222222222"),
+ },
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
+ "51b8b4fb32271d39fbdd760397406177b2b0fd36 refs/pull/10/head\n",
+ "02b5a6031ba7a8cbfde5d65ff9e13ecdbc4a92ca refs/pull/100/head\n",
+ "c284c212704c43659bf5913656b8b28e32da1621 refs/pull/100/merge\n",
+ "3d6537dce68c8b7874333a1720958bd8db3ae8ca refs/pull/101/merge\n",
+ "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11\n",
+ "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{}\n",
+ "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
+ "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
+ pktline.FlushString,
+ },
+ references: map[string]plumbing.Hash{
+ "refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
+ "refs/pull/10/head": plumbing.NewHash("51b8b4fb32271d39fbdd760397406177b2b0fd36"),
+ "refs/pull/100/head": plumbing.NewHash("02b5a6031ba7a8cbfde5d65ff9e13ecdbc4a92ca"),
+ "refs/pull/100/merge": plumbing.NewHash("c284c212704c43659bf5913656b8b28e32da1621"),
+ "refs/pull/101/merge": plumbing.NewHash("3d6537dce68c8b7874333a1720958bd8db3ae8ca"),
+ "refs/tags/v2.6.11": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
+ "refs/tags/v2.6.11-tree": plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
+ },
+ peeled: map[string]plumbing.Hash{
+ "refs/tags/v2.6.11": plumbing.NewHash("c39ae07f393806ccf406ef966e9a15afc43cc36a"),
+ "refs/tags/v2.6.11-tree": plumbing.NewHash("c39ae07f393806ccf406ef966e9a15afc43cc36a"),
+ },
+ }} {
ar := s.testDecodeOK(c, test.input)
comment := Commentf("input = %v\n", test.input)
c.Assert(ar.References, DeepEquals, test.references, comment)
@@ -409,42 +396,40 @@ func (s *AdvRefsDecodeSuite) TestShallow(c *C) {
for _, test := range [...]struct {
input []string
shallows []plumbing.Hash
- }{
- {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- pktline.FlushString,
- },
- shallows: []plumbing.Hash{},
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- pktline.FlushString,
- },
- shallows: []plumbing.Hash{plumbing.NewHash("1111111111111111111111111111111111111111")},
- }, {
- input: []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
- "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
- "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
- "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
- "shallow 1111111111111111111111111111111111111111\n",
- "shallow 2222222222222222222222222222222222222222\n",
- pktline.FlushString,
- },
- shallows: []plumbing.Hash{
- plumbing.NewHash("1111111111111111111111111111111111111111"),
- plumbing.NewHash("2222222222222222222222222222222222222222"),
- },
+ }{{
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
+ "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
+ "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
+ pktline.FlushString,
+ },
+ shallows: []plumbing.Hash{},
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
+ "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
+ "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
+ "shallow 1111111111111111111111111111111111111111\n",
+ pktline.FlushString,
+ },
+ shallows: []plumbing.Hash{plumbing.NewHash("1111111111111111111111111111111111111111")},
+ }, {
+ input: []string{
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00ofs-delta symref=HEAD:/refs/heads/master\n",
+ "a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
+ "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
+ "c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
+ "shallow 1111111111111111111111111111111111111111\n",
+ "shallow 2222222222222222222222222222222222222222\n",
+ pktline.FlushString,
+ },
+ shallows: []plumbing.Hash{
+ plumbing.NewHash("1111111111111111111111111111111111111111"),
+ plumbing.NewHash("2222222222222222222222222222222222222222"),
},
- } {
+ }} {
ar := s.testDecodeOK(c, test.input)
comment := Commentf("input = %v\n", test.input)
c.Assert(ar.Shallows, DeepEquals, test.shallows, comment)
diff --git a/plumbing/protocol/packp/advrefs_encode.go b/plumbing/protocol/packp/advrefs_encode.go
index 3c5df19..05a9c8e 100644
--- a/plumbing/protocol/packp/advrefs_encode.go
+++ b/plumbing/protocol/packp/advrefs_encode.go
@@ -7,6 +7,7 @@ import (
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
)
// Encode writes the AdvRefs encoding to a writer.
@@ -89,13 +90,11 @@ func formatSeparator(h *plumbing.Hash) string {
return head
}
-func formatCaps(c *Capabilities) string {
+func formatCaps(c *capability.List) string {
if c == nil {
return ""
}
- c.Sort()
-
return c.String()
}
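
With the Sort call gone, formatCaps relies on List.String, which emits capabilities in insertion order; the expected strings in the encode tests below change accordingly. A brief sketch of the new behaviour (errors from Add are ignored for brevity):

    package main

    import (
    	"fmt"

    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
    )

    func main() {
    	caps := capability.NewList()
    	caps.Add(capability.SymRef, "HEAD:/refs/heads/master")
    	caps.Add(capability.OFSDelta)
    	caps.Add(capability.MultiACK)

    	// No more Sort: String keeps the insertion order.
    	fmt.Println(caps.String())
    	// symref=HEAD:/refs/heads/master ofs-delta multi_ack
    }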
diff --git a/plumbing/protocol/packp/advrefs_encode_test.go b/plumbing/protocol/packp/advrefs_encode_test.go
index 207ce58..f8cc815 100644
--- a/plumbing/protocol/packp/advrefs_encode_test.go
+++ b/plumbing/protocol/packp/advrefs_encode_test.go
@@ -6,6 +6,7 @@ import (
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
. "gopkg.in/check.v1"
)
@@ -52,10 +53,10 @@ func (s *AdvRefsEncodeSuite) TestHead(c *C) {
}
func (s *AdvRefsEncodeSuite) TestCapsNoHead(c *C) {
- capabilities := NewCapabilities()
- capabilities.Add("symref", "HEAD:/refs/heads/master")
- capabilities.Add("ofs-delta")
- capabilities.Add("multi_ack")
+ capabilities := capability.NewList()
+ capabilities.Add(capability.MultiACK)
+ capabilities.Add(capability.OFSDelta)
+ capabilities.Add(capability.SymRef, "HEAD:/refs/heads/master")
ar := &AdvRefs{
Capabilities: capabilities,
}
@@ -70,10 +71,10 @@ func (s *AdvRefsEncodeSuite) TestCapsNoHead(c *C) {
func (s *AdvRefsEncodeSuite) TestCapsWithHead(c *C) {
hash := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- capabilities := NewCapabilities()
- capabilities.Add("symref", "HEAD:/refs/heads/master")
- capabilities.Add("ofs-delta")
- capabilities.Add("multi_ack")
+ capabilities := capability.NewList()
+ capabilities.Add(capability.MultiACK)
+ capabilities.Add(capability.OFSDelta)
+ capabilities.Add(capability.SymRef, "HEAD:/refs/heads/master")
ar := &AdvRefs{
Head: &hash,
Capabilities: capabilities,
@@ -170,10 +171,10 @@ func (s *AdvRefsEncodeSuite) TestShallow(c *C) {
func (s *AdvRefsEncodeSuite) TestAll(c *C) {
hash := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
- capabilities := NewCapabilities()
- capabilities.Add("symref", "HEAD:/refs/heads/master")
- capabilities.Add("ofs-delta")
- capabilities.Add("multi_ack")
+ capabilities := capability.NewList()
+ capabilities.Add(capability.MultiACK)
+ capabilities.Add(capability.OFSDelta)
+ capabilities.Add(capability.SymRef, "HEAD:/refs/heads/master")
references := map[string]plumbing.Hash{
"refs/heads/master": plumbing.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
diff --git a/plumbing/protocol/packp/advrefs_test.go b/plumbing/protocol/packp/advrefs_test.go
index 6d9f488..1689938 100644
--- a/plumbing/protocol/packp/advrefs_test.go
+++ b/plumbing/protocol/packp/advrefs_test.go
@@ -10,6 +10,7 @@ import (
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
. "gopkg.in/check.v1"
)
@@ -52,9 +53,7 @@ func (s *SuiteDecodeEncode) test(c *C, in []string, exp []string) {
obtained = buf.Bytes()
}
- c.Assert(obtained, DeepEquals, expected,
- Commentf("input = %v\nobtained = %q\nexpected = %q\n",
- in, string(obtained), string(expected)))
+ c.Assert(string(obtained), DeepEquals, string(expected))
}
func (s *SuiteDecodeEncode) TestNoHead(c *C) {
@@ -115,7 +114,7 @@ func (s *SuiteDecodeEncode) TestRefs(c *C) {
}
expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
"7777777777777777777777777777777777777777 refs/tags/v2.6.12-tree\n",
@@ -137,7 +136,7 @@ func (s *SuiteDecodeEncode) TestPeeled(c *C) {
}
expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
"c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
@@ -163,7 +162,7 @@ func (s *SuiteDecodeEncode) TestAll(c *C) {
}
expected := []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
"c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
@@ -195,7 +194,7 @@ func (s *SuiteDecodeEncode) TestAllSmart(c *C) {
expected := []string{
"# service=git-upload-pack\n",
pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
"c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
@@ -227,7 +226,7 @@ func (s *SuiteDecodeEncode) TestAllSmartBug(c *C) {
expected := []string{
"# service=git-upload-pack\n",
pktline.FlushString,
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n",
+ "6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00symref=HEAD:/refs/heads/master ofs-delta multi_ack\n",
"a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n",
"5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n",
"c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11-tree^{}\n",
@@ -278,9 +277,9 @@ func ExampleEncoder_Encode() {
ar.Head = &head
// ...add some server capabilities...
- ar.Capabilities.Add("symref", "HEAD:/refs/heads/master")
- ar.Capabilities.Add("ofs-delta")
- ar.Capabilities.Add("multi_ack")
+ ar.Capabilities.Add(capability.MultiACK)
+ ar.Capabilities.Add(capability.OFSDelta)
+ ar.Capabilities.Add(capability.SymRef, "HEAD:/refs/heads/master")
// ...add a couple of references...
ar.References["refs/heads/master"] = plumbing.NewHash("2222222222222222222222222222222222222222")
diff --git a/plumbing/protocol/packp/capabilities.go b/plumbing/protocol/packp/capabilities.go
deleted file mode 100644
index d77c2fa..0000000
--- a/plumbing/protocol/packp/capabilities.go
+++ /dev/null
@@ -1,136 +0,0 @@
-package packp
-
-import (
- "fmt"
- "sort"
- "strings"
-)
-
-// Capabilities contains all the server capabilities
-// https://github.com/git/git/blob/master/Documentation/technical/protocol-capabilities.txt
-type Capabilities struct {
- m map[string]*Capability
- o []string
-}
-
-// Capability represents a server capability
-type Capability struct {
- Name string
- Values []string
-}
-
-// NewCapabilities returns a new Capabilities struct
-func NewCapabilities() *Capabilities {
- return &Capabilities{
- m: make(map[string]*Capability),
- }
-}
-
-func (c *Capabilities) IsEmpty() bool {
- return len(c.o) == 0
-}
-
-// Decode decodes a string
-func (c *Capabilities) Decode(raw string) {
- params := strings.Split(raw, " ")
- for _, p := range params {
- s := strings.SplitN(p, "=", 2)
-
- var value string
- if len(s) == 2 {
- value = s[1]
- }
-
- c.Add(s[0], value)
- }
-}
-
-// Get returns the values for a capability
-func (c *Capabilities) Get(capability string) *Capability {
- return c.m[capability]
-}
-
-// Set sets a capability removing the values
-func (c *Capabilities) Set(capability string, values ...string) {
- if _, ok := c.m[capability]; ok {
- delete(c.m, capability)
- }
-
- c.Add(capability, values...)
-}
-
-// Add adds a capability, values are optional
-func (c *Capabilities) Add(capability string, values ...string) {
- if !c.Supports(capability) {
- c.m[capability] = &Capability{Name: capability}
- c.o = append(c.o, capability)
- }
-
- if len(values) == 0 {
- return
- }
-
- c.m[capability].Values = append(c.m[capability].Values, values...)
-}
-
-// Supports returns true if capability is present
-func (c *Capabilities) Supports(capability string) bool {
- _, ok := c.m[capability]
- return ok
-}
-
-// SymbolicReference returns the reference for a given symbolic reference
-func (c *Capabilities) SymbolicReference(sym string) string {
- if !c.Supports("symref") {
- return ""
- }
-
- for _, symref := range c.Get("symref").Values {
- parts := strings.Split(symref, ":")
- if len(parts) != 2 {
- continue
- }
-
- if parts[0] == sym {
- return parts[1]
- }
- }
-
- return ""
-}
-
-// Sorts capabilities in increasing order of their name
-func (c *Capabilities) Sort() {
- sort.Strings(c.o)
-}
-
-func (c *Capabilities) String() string {
- if len(c.o) == 0 {
- return ""
- }
-
- var o string
- for _, key := range c.o {
- cap := c.m[key]
-
- added := false
- for _, value := range cap.Values {
- if value == "" {
- continue
- }
-
- added = true
- o += fmt.Sprintf("%s=%s ", key, value)
- }
-
- if len(cap.Values) == 0 || !added {
- o += key + " "
- }
- }
-
- if len(o) == 0 {
- return o
- }
-
- return o[:len(o)-1]
-}
diff --git a/plumbing/protocol/packp/capabilities_test.go b/plumbing/protocol/packp/capabilities_test.go
deleted file mode 100644
index a909e4c..0000000
--- a/plumbing/protocol/packp/capabilities_test.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package packp
-
-import (
- . "gopkg.in/check.v1"
-)
-
-type SuiteCapabilities struct{}
-
-var _ = Suite(&SuiteCapabilities{})
-
-func (s *SuiteCapabilities) TestDecode(c *C) {
- cap := NewCapabilities()
- cap.Decode("symref=foo symref=qux thin-pack")
-
- c.Assert(cap.m, HasLen, 2)
- c.Assert(cap.Get("symref").Values, DeepEquals, []string{"foo", "qux"})
- c.Assert(cap.Get("thin-pack").Values, DeepEquals, []string{""})
-}
-
-func (s *SuiteCapabilities) TestSet(c *C) {
- cap := NewCapabilities()
- cap.Add("symref", "foo", "qux")
- cap.Set("symref", "bar")
-
- c.Assert(cap.m, HasLen, 1)
- c.Assert(cap.Get("symref").Values, DeepEquals, []string{"bar"})
-}
-
-func (s *SuiteCapabilities) TestSetEmpty(c *C) {
- cap := NewCapabilities()
- cap.Set("foo", "bar")
-
- c.Assert(cap.Get("foo").Values, HasLen, 1)
-}
-
-func (s *SuiteCapabilities) TestAdd(c *C) {
- cap := NewCapabilities()
- cap.Add("symref", "foo", "qux")
- cap.Add("thin-pack")
-
- c.Assert(cap.String(), Equals, "symref=foo symref=qux thin-pack")
-}
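
With packp.Capabilities and its tests removed, existing callers migrate to the capability package. A rough before/after sketch (the old calls appear only as comments, since that API no longer compiles):

    package main

    import (
    	"fmt"

    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
    )

    func main() {
    	// Before: caps := packp.NewCapabilities()
    	caps := capability.NewList()

    	// Before: caps.Add("symref", "HEAD:/refs/heads/master"), untyped and never failing.
    	// After: a typed constant, and Add validates the arguments.
    	if err := caps.Add(capability.SymRef, "HEAD:/refs/heads/master"); err != nil {
    		fmt.Println("add:", err)
    		return
    	}

    	// Before: caps.Get("symref").Values
    	fmt.Println(caps.Get(capability.SymRef)) // [HEAD:/refs/heads/master]
    }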
diff --git a/plumbing/protocol/packp/capability/capability.go b/plumbing/protocol/packp/capability/capability.go
new file mode 100644
index 0000000..06fbfca
--- /dev/null
+++ b/plumbing/protocol/packp/capability/capability.go
@@ -0,0 +1,249 @@
+package capability
+
+// Capability describes a server or client capability
+type Capability string
+
+func (n Capability) String() string {
+ return string(n)
+}
+
+const (
+ // MultiACK capability allows the server to return "ACK obj-id continue" as
+ // soon as it finds a commit that it can use as a common base, between the
+ // client's wants and the client's have set.
+ //
+ // By sending this early, the server can potentially head off the client
+ // from walking any further down that particular branch of the client's
+ // repository history. The client may still need to walk down other
+ // branches, sending have lines for those, until the server has a
+ // complete cut across the DAG, or the client has said "done".
+ //
+ // Without multi_ack, a client sends have lines in --date-order until
+ // the server has found a common base. That means the client will send
+ // have lines that are already known by the server to be common, because
+ // they overlap in time with another branch that the server hasn't found
+ // a common base on yet.
+ //
+ // For example suppose the client has commits in caps that the server
+ // doesn't and the server has commits in lower case that the client
+ // doesn't, as in the following diagram:
+ //
+ // +---- u ---------------------- x
+ // / +----- y
+ // / /
+ // a -- b -- c -- d -- E -- F
+ // \
+ // +--- Q -- R -- S
+ //
+ // If the client wants x,y and starts out by saying have F,S, the server
+ // doesn't know what F,S is. Eventually the client says "have d" and
+ // the server sends "ACK d continue" to let the client know to stop
+ // walking down that line (so don't send c-b-a), but it's not done yet,
+ // it needs a base for x. The client keeps going with S-R-Q, until a
+ // gets reached, at which point the server has a clear base and it all
+ // ends.
+ //
+ // Without multi_ack the client would have sent that c-b-a chain anyway,
+ // interleaved with S-R-Q.
+ MultiACK Capability = "multi_ack"
+ // MultiACKDetailed is an extension of multi_ack that permits client to
+ // better understand the server's in-memory state.
+ MultiACKDetailed Capability = "multi_ack_detailed"
+ // NoDone should only be used with the smart HTTP protocol. If
+ // multi_ack_detailed and no-done are both present, then the sender is
+ // free to immediately send a pack following its first "ACK obj-id ready"
+ // message.
+ //
+ // Without no-done in the smart HTTP protocol, the server session would
+ // end and the client has to make another trip to send "done" before
+ // the server can send the pack. no-done removes the last round and
+ // thus slightly reduces latency.
+ NoDone Capability = "no-done"
+ // ThinPack is one with deltas which reference base objects not
+ // contained within the pack (but are known to exist at the receiving
+ // end). This can reduce the network traffic significantly, but it
+ // requires the receiving end to know how to "thicken" these packs by
+ // adding the missing bases to the pack.
+ //
+ // The upload-pack server advertises 'thin-pack' when it can generate
+ // and send a thin pack. A client requests the 'thin-pack' capability
+ // when it understands how to "thicken" it, notifying the server that
+ // it can receive such a pack. A client MUST NOT request the
+ // 'thin-pack' capability if it cannot turn a thin pack into a
+ // self-contained pack.
+ //
+ // Receive-pack, on the other hand, is assumed by default to be able to
+ // handle thin packs, but can ask the client not to use the feature by
+ // advertising the 'no-thin' capability. A client MUST NOT send a thin
+ // pack if the server advertises the 'no-thin' capability.
+ //
+ // The reasons for this asymmetry are historical. The receive-pack
+ // program did not exist until after the invention of thin packs, so
+ // historically the reference implementation of receive-pack always
+ // understood thin packs. Adding 'no-thin' later allowed receive-pack
+ // to disable the feature in a backwards-compatible manner.
+ ThinPack Capability = "thin-pack"
+ // Sideband means that server can send, and client understand multiplexed
+ // progress reports and error info interleaved with the packfile itself.
+ //
+ // These two options are mutually exclusive. A modern client always
+ // favors Sideband64k.
+ //
+ // Either mode indicates that the packfile data will be streamed broken
+ // up into packets of up to either 1000 bytes in the case of 'side_band',
+ // or 65520 bytes in the case of 'side_band_64k'. Each packet is made up
+ // of a leading 4-byte pkt-line length of how much data is in the packet,
+ // followed by a 1-byte stream code, followed by the actual data.
+ //
+ // The stream code can be one of:
+ //
+ // 1 - pack data
+ // 2 - progress messages
+ // 3 - fatal error message just before stream aborts
+ //
+ // The "side-band-64k" capability came about as a way for newer clients
+ // that can handle much larger packets to request packets that are
+ // actually crammed nearly full, while maintaining backward compatibility
+ // for the older clients.
+ //
+ // Further, with side-band and its up to 1000-byte messages, it's actually
+ // 999 bytes of payload and 1 byte for the stream code. With side-band-64k,
+ // same deal, you have up to 65519 bytes of data and 1 byte for the stream
+ // code.
+ //
+ // The client MUST send only maximum of one of "side-band" and "side-
+ // band-64k". Server MUST diagnose it as an error if client requests
+ // both.
+ Sideband Capability = "side-band"
+ Sideband64k Capability = "side-band-64k"
+ // OFSDelta server can send, and client understand PACKv2 with delta
+ // referring to its base by position in pack rather than by an obj-id. That
+ // is, they can send/read OBJ_OFS_DELTA (aka type 6) in a packfile.
+ OFSDelta Capability = "ofs-delta"
+ // Agent the server may optionally send this capability to notify the client
+ // that the server is running version `X`. The client may optionally return
+ // its own agent string by responding with an `agent=Y` capability (but it
+ // MUST NOT do so if the server did not mention the agent capability). The
+ // `X` and `Y` strings may contain any printable ASCII characters except
+ // space (i.e., the byte range 32 < x < 127), and are typically of the form
+ // "package/version" (e.g., "git/1.8.3.1"). The agent strings are purely
+ // informative for statistics and debugging purposes, and MUST NOT be used
+ // to programmatically assume the presence or absence of particular features.
+ Agent Capability = "agent"
+ // Shallow capability adds "deepen", "shallow" and "unshallow" commands to
+ // the fetch-pack/upload-pack protocol so clients can request shallow
+ // clones.
+ Shallow Capability = "shallow"
+ // DeepenSince adds "deepen-since" command to fetch-pack/upload-pack
+ // protocol so the client can request shallow clones that are cut at a
+ // specific time, instead of depth. Internally it's equivalent of doing
+ // "rev-list --max-age=<timestamp>" on the server side. "deepen-since"
+ // cannot be used with "deepen".
+ DeepenSince Capability = "deepen-since"
+ // DeepenNot adds "deepen-not" command to fetch-pack/upload-pack
+ // protocol so the client can request shallow clones that are cut at a
+ // specific revision, instead of depth. Internally it's equivalent of
+ // doing "rev-list --not <rev>" on the server side. "deepen-not"
+ // cannot be used with "deepen", but can be used with "deepen-since".
+ DeepenNot Capability = "deepen-not"
+ // DeepenRelative if this capability is requested by the client, the
+ // semantics of "deepen" command is changed. The "depth" argument is the
+ // depth from the current shallow boundary, instead of the depth from
+ // remote refs.
+ DeepenRelative Capability = "deepen-relative"
+ // NoProgress the client was started with "git clone -q" or something, and
+ // doesn't want that side band 2. Basically the client just says "I do not
+ // wish to receive stream 2 on sideband, so do not send it to me, and if
+ // you did, I will drop it on the floor anyway". However, the sideband
+ // channel 3 is still used for error responses.
+ NoProgress Capability = "no-progress"
+ // IncludeTag capability is about sending annotated tags if we are
+ // sending objects they point to. If we pack an object to the client, and
+ // a tag object points exactly at that object, we pack the tag object too.
+ // In general this allows a client to get all new annotated tags when it
+ // fetches a branch, in a single network connection.
+ //
+ // Clients MAY always send include-tag, hardcoding it into a request when
+ // the server advertises this capability. The decision for a client to
+ // request include-tag only has to do with the client's desires for tag
+ // data, whether or not a server had advertised objects in the
+ // refs/tags/* namespace.
+ //
+ // Servers MUST pack the tags if their referrant is packed and the client
+ // has requested include-tags.
+ //
+ // Clients MUST be prepared for the case where a server has ignored
+ // include-tag and has not actually sent tags in the pack. In such
+ // cases the client SHOULD issue a subsequent fetch to acquire the tags
+ // that include-tag would have otherwise given the client.
+ //
+ // The server SHOULD send include-tag, if it supports it, regardless
+ // of whether or not there are tags available.
+ IncludeTag Capability = "include-tag"
+ // ReportStatus the receive-pack process can receive a 'report-status'
+ // capability, which tells it that the client wants a report of what
+ // happened after a packfile upload and reference update. If the pushing
+ // client requests this capability, after unpacking and updating references
+ // the server will respond with whether the packfile unpacked successfully
+ // and if each reference was updated successfully. If any of those were not
+ // successful, it will send back an error message. See pack-protocol.txt
+ // for example messages.
+ ReportStatus Capability = "report-status"
+ // DeleteRefs If the server sends back this capability, it means that
+ // it is capable of accepting a zero-id value as the target
+ // value of a reference update. It is not sent back by the client, it
+ // simply informs the client that it can be sent zero-id values
+ // to delete references
+ DeleteRefs Capability = "delete-refs"
+ // Quiet If the receive-pack server advertises this capability, it is
+ // capable of silencing human-readable progress output which otherwise may
+ // be shown when processing the received pack. A send-pack client should
+ // respond with the 'quiet' capability to suppress server-side progress
+ // reporting if the local progress reporting is also being suppressed
+ // (e.g., via `push -q`, or if stderr does not go to a tty).
+ Quiet Capability = "quiet"
+ // Atomic If the server sends this capability it is capable of accepting
+ // atomic pushes. If the pushing client requests this capability, the server
+ // will update the refs in one atomic transaction. Either all refs are
+ // updated or none.
+ Atomic Capability = "atomic"
+ // PushOptions If the server sends this capability it is able to accept
+ // push options after the update commands have been sent, but before the
+ // packfile is streamed. If the pushing client requests this capability,
+ // the server will pass the options to the pre- and post- receive hooks
+ // that process this push request.
+ PushOptions Capability = "push-options"
+ // AllowTipSHA1InWant if the upload-pack server advertises this capability,
+ // fetch-pack may send "want" lines with SHA-1s that exist at the server but
+ // are not advertised by upload-pack.
+ AllowTipSHA1InWant Capability = "allow-tip-sha1-in-want"
+ // AllowReachableSHA1InWant if the upload-pack server advertises this
+ // capability, fetch-pack may send "want" lines with SHA-1s that exist at
+ // the server but are not advertised by upload-pack.
+ AllowReachableSHA1InWant Capability = "allow-reachable-sha1-in-want"
+ // PushCert the receive-pack server that advertises this capability is
+ // willing to accept a signed push certificate, and asks the <nonce> to be
+ // included in the push certificate. A send-pack client MUST NOT
+ // send a push-cert packet unless the receive-pack server advertises
+ // this capability.
+ PushCert Capability = "push-cert"
+ // SymRef symbolic reference support for better negotiation.
+ SymRef Capability = "symref"
+)
+
+var valid = map[Capability]bool{
+ MultiACK: true, MultiACKDetailed: true, NoDone: true, ThinPack: true,
+ Sideband: true, Sideband64k: true, OFSDelta: true, Agent: true,
+ Shallow: true, DeepenSince: true, DeepenNot: true, DeepenRelative: true,
+ NoProgress: true, IncludeTag: true, ReportStatus: true, DeleteRefs: true,
+ Quiet: true, Atomic: true, PushOptions: true, AllowTipSHA1InWant: true,
+ AllowReachableSHA1InWant: true, PushCert: true, SymRef: true,
+}
+
+var requiresArgument = map[Capability]bool{
+ Agent: true, PushCert: true, SymRef: true,
+}
+
+var multipleArgument = map[Capability]bool{
+ SymRef: true,
+}
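
The valid, requiresArgument and multipleArgument maps above feed the validation performed by List.Add and List.Set (defined in list.go below). A hedged sketch of how those rules look from the caller's side:

    package main

    import (
    	"fmt"

    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
    )

    func main() {
    	l := capability.NewList()

    	// ofs-delta takes no argument, so passing one fails.
    	fmt.Println(l.Add(capability.OFSDelta, "x")) // arguments not allowed

    	// symref requires an argument and accepts several of them.
    	fmt.Println(l.Add(capability.SymRef))                           // arguments required
    	fmt.Println(l.Add(capability.SymRef, "HEAD:refs/heads/master")) // <nil>

    	// agent takes exactly one argument.
    	fmt.Println(l.Add(capability.Agent, "git/2.10.0")) // <nil>
    	fmt.Println(l.Add(capability.Agent, "another"))    // multiple arguments not allowed
    }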
diff --git a/plumbing/protocol/packp/capability/list.go b/plumbing/protocol/packp/capability/list.go
new file mode 100644
index 0000000..73d1f25
--- /dev/null
+++ b/plumbing/protocol/packp/capability/list.go
@@ -0,0 +1,161 @@
+package capability
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "strings"
+)
+
+var (
+ // ErrUnknownCapability is returned if an unknown capability is given
+ ErrUnknownCapability = errors.New("unknown capability")
+ // ErrArgumentsRequired is returned if no arguments are given with a
+ // capability that requires arguments
+ ErrArgumentsRequired = errors.New("arguments required")
+ // ErrArguments is returned if arguments are given with a capability that
+ // does not support arguments
+ ErrArguments = errors.New("arguments not allowed")
+ // ErrEmtpyArgument is returned when an empty value is given
+ ErrEmtpyArgument = errors.New("empty argument")
+ // ErrMultipleArguments is returned if multiple arguments are given to a
+ // capability that does not support them
+ ErrMultipleArguments = errors.New("multiple arguments not allowed")
+)
+
+// List represents a list of capabilities
+type List struct {
+ m map[Capability]*entry
+ sort []string
+}
+
+type entry struct {
+ Name Capability
+ Values []string
+}
+
+// NewList returns a new List of capabilities
+func NewList() *List {
+ return &List{
+ m: make(map[Capability]*entry),
+ }
+}
+
+// IsEmpty returns true if the List is empty
+func (l *List) IsEmpty() bool {
+ return len(l.sort) == 0
+}
+
+// Decode decodes list of capabilities from raw into the list
+func (l *List) Decode(raw []byte) error {
+ for _, data := range bytes.Split(raw, []byte{' '}) {
+ pair := bytes.SplitN(data, []byte{'='}, 2)
+
+ c := Capability(pair[0])
+ if len(pair) == 1 {
+ if err := l.Add(c); err != nil {
+ return err
+ }
+
+ continue
+ }
+
+ if err := l.Add(c, string(pair[1])); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// Get returns the values for a capability
+func (l *List) Get(capability Capability) []string {
+ if _, ok := l.m[capability]; !ok {
+ return nil
+ }
+
+ return l.m[capability].Values
+}
+
+// Set sets a capability removing the previous values
+func (l *List) Set(capability Capability, values ...string) error {
+ if _, ok := l.m[capability]; ok {
+ delete(l.m, capability)
+ }
+
+ return l.Add(capability, values...)
+}
+
+// Add adds a capability, values are optional
+func (l *List) Add(c Capability, values ...string) error {
+ if err := l.validate(c, values); err != nil {
+ return err
+ }
+
+ if !l.Supports(c) {
+ l.m[c] = &entry{Name: c}
+ l.sort = append(l.sort, c.String())
+ }
+
+ if len(values) == 0 {
+ return nil
+ }
+
+ if !multipleArgument[c] && len(l.m[c].Values) > 0 {
+ return ErrMultipleArguments
+ }
+
+ l.m[c].Values = append(l.m[c].Values, values...)
+ return nil
+}
+
+func (l *List) validate(c Capability, values []string) error {
+ if _, ok := valid[c]; !ok {
+ return ErrUnknownCapability
+ }
+
+ if requiresArgument[c] && len(values) == 0 {
+ return ErrArgumentsRequired
+ }
+
+ if !requiresArgument[c] && len(values) != 0 {
+ return ErrArguments
+ }
+
+ if !multipleArgument[c] && len(values) > 1 {
+ return ErrMultipleArguments
+ }
+
+ for _, v := range values {
+ if v == "" {
+ return ErrEmtpyArgument
+ }
+ }
+
+ return nil
+}
+
+// Supports returns true if capability is present
+func (l *List) Supports(capability Capability) bool {
+ _, ok := l.m[capability]
+ return ok
+}
+
+// String generates the capabilities string; the capabilities are listed in
+// insertion order
+func (l *List) String() string {
+ var o []string
+ for _, key := range l.sort {
+ cap := l.m[Capability(key)]
+ if len(cap.Values) == 0 {
+ o = append(o, key)
+ continue
+ }
+
+ for _, value := range cap.Values {
+ o = append(o, fmt.Sprintf("%s=%s", key, value))
+ }
+ }
+
+ return strings.Join(o, " ")
+}
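
A short usage sketch of the List type above: decode a raw capability announcement, query it, and re-encode it (the raw line is illustrative):

    package main

    import (
    	"fmt"

    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
    )

    func main() {
    	l := capability.NewList()

    	// Decode a raw capability announcement as found after the first ref.
    	raw := []byte("symref=HEAD:refs/heads/master ofs-delta agent=git/2.10.0")
    	if err := l.Decode(raw); err != nil {
    		fmt.Println("decode:", err)
    		return
    	}

    	fmt.Println(l.Supports(capability.OFSDelta)) // true
    	fmt.Println(l.Get(capability.SymRef))        // [HEAD:refs/heads/master]

    	// String re-encodes the list in insertion order.
    	fmt.Println(l.String())
    	// symref=HEAD:refs/heads/master ofs-delta agent=git/2.10.0
    }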
diff --git a/plumbing/protocol/packp/capability/list_test.go b/plumbing/protocol/packp/capability/list_test.go
new file mode 100644
index 0000000..6d350b0
--- /dev/null
+++ b/plumbing/protocol/packp/capability/list_test.go
@@ -0,0 +1,141 @@
+package capability
+
+import (
+ "testing"
+
+ check "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) { check.TestingT(t) }
+
+type SuiteCapabilities struct{}
+
+var _ = check.Suite(&SuiteCapabilities{})
+
+func (s *SuiteCapabilities) TestIsEmpty(c *check.C) {
+ cap := NewList()
+ c.Assert(cap.IsEmpty(), check.Equals, true)
+}
+
+func (s *SuiteCapabilities) TestDecode(c *check.C) {
+ cap := NewList()
+ err := cap.Decode([]byte("symref=foo symref=qux thin-pack"))
+ c.Assert(err, check.IsNil)
+
+ c.Assert(cap.m, check.HasLen, 2)
+ c.Assert(cap.Get(SymRef), check.DeepEquals, []string{"foo", "qux"})
+ c.Assert(cap.Get(ThinPack), check.IsNil)
+}
+
+func (s *SuiteCapabilities) TestDecodeWithErrArguments(c *check.C) {
+ cap := NewList()
+ err := cap.Decode([]byte("thin-pack=foo"))
+ c.Assert(err, check.Equals, ErrArguments)
+}
+
+func (s *SuiteCapabilities) TestDecodeWithEqual(c *check.C) {
+ cap := NewList()
+ err := cap.Decode([]byte("agent=foo=bar"))
+ c.Assert(err, check.IsNil)
+
+ c.Assert(cap.m, check.HasLen, 1)
+ c.Assert(cap.Get(Agent), check.DeepEquals, []string{"foo=bar"})
+}
+
+func (s *SuiteCapabilities) TestDecodeWithErrUnknownCapability(c *check.C) {
+ cap := NewList()
+ err := cap.Decode([]byte("foo"))
+ c.Assert(err, check.Equals, ErrUnknownCapability)
+}
+
+func (s *SuiteCapabilities) TestString(c *check.C) {
+ cap := NewList()
+ cap.Set(Agent, "bar")
+ cap.Set(SymRef, "foo:qux")
+ cap.Set(ThinPack)
+
+ c.Assert(cap.String(), check.Equals, "agent=bar symref=foo:qux thin-pack")
+}
+
+func (s *SuiteCapabilities) TestStringSort(c *check.C) {
+ cap := NewList()
+ cap.Set(Agent, "bar")
+ cap.Set(SymRef, "foo:qux")
+ cap.Set(ThinPack)
+
+ c.Assert(cap.String(), check.Equals, "agent=bar symref=foo:qux thin-pack")
+}
+
+func (s *SuiteCapabilities) TestSet(c *check.C) {
+ cap := NewList()
+ err := cap.Add(SymRef, "foo", "qux")
+ c.Assert(err, check.IsNil)
+ err = cap.Set(SymRef, "bar")
+ c.Assert(err, check.IsNil)
+
+ c.Assert(cap.m, check.HasLen, 1)
+ c.Assert(cap.Get(SymRef), check.DeepEquals, []string{"bar"})
+}
+
+func (s *SuiteCapabilities) TestSetEmpty(c *check.C) {
+ cap := NewList()
+ err := cap.Set(Agent, "bar")
+ c.Assert(err, check.IsNil)
+
+ c.Assert(cap.Get(Agent), check.HasLen, 1)
+}
+
+func (s *SuiteCapabilities) TestGetEmpty(c *check.C) {
+ cap := NewList()
+ c.Assert(cap.Get(Agent), check.HasLen, 0)
+}
+
+func (s *SuiteCapabilities) TestAdd(c *check.C) {
+ cap := NewList()
+ err := cap.Add(SymRef, "foo", "qux")
+ c.Assert(err, check.IsNil)
+
+ err = cap.Add(ThinPack)
+ c.Assert(err, check.IsNil)
+
+ c.Assert(cap.String(), check.Equals, "symref=foo symref=qux thin-pack")
+}
+
+func (s *SuiteCapabilities) TestAddErrUnknownCapability(c *check.C) {
+ cap := NewList()
+ err := cap.Add(Capability("foo"))
+ c.Assert(err, check.Equals, ErrUnknownCapability)
+}
+
+func (s *SuiteCapabilities) TestAddErrArgumentsRequired(c *check.C) {
+ cap := NewList()
+ err := cap.Add(SymRef)
+ c.Assert(err, check.Equals, ErrArgumentsRequired)
+}
+
+func (s *SuiteCapabilities) TestAddErrArgumentsNotAllowed(c *check.C) {
+ cap := NewList()
+ err := cap.Add(OFSDelta, "foo")
+ c.Assert(err, check.Equals, ErrArguments)
+}
+
+func (s *SuiteCapabilities) TestAddErrArgumendts(c *check.C) {
+ cap := NewList()
+ err := cap.Add(SymRef, "")
+ c.Assert(err, check.Equals, ErrEmtpyArgument)
+}
+
+func (s *SuiteCapabilities) TestAddErrMultipleArguments(c *check.C) {
+ cap := NewList()
+ err := cap.Add(Agent, "foo")
+ c.Assert(err, check.IsNil)
+
+ err = cap.Add(Agent, "bar")
+ c.Assert(err, check.Equals, ErrMultipleArguments)
+}
+
+func (s *SuiteCapabilities) TestAddErrMultipleArgumentsAtTheSameTime(c *check.C) {
+ cap := NewList()
+ err := cap.Add(Agent, "foo", "bar")
+ c.Assert(err, check.Equals, ErrMultipleArguments)
+}
diff --git a/plumbing/protocol/packp/common.go b/plumbing/protocol/packp/common.go
index c4b44f7..3d7786b 100644
--- a/plumbing/protocol/packp/common.go
+++ b/plumbing/protocol/packp/common.go
@@ -1,7 +1,5 @@
package packp
-import "bytes"
-
type stateFn func() stateFn
const (
@@ -31,15 +29,3 @@ var (
deepenSince = []byte("deepen-since ")
deepenReference = []byte("deepen-not ")
)
-
-// Capabilities are a single string or a name=value.
-// Even though we are only going to read at moust 1 value, we return
-// a slice of values, as Capability.Add receives that.
-func readCapability(data []byte) (name string, values []string) {
- pair := bytes.SplitN(data, []byte{'='}, 2)
- if len(pair) == 2 {
- values = append(values, string(pair[1]))
- }
-
- return string(pair[0]), values
-}
diff --git a/plumbing/protocol/packp/ulreq.go b/plumbing/protocol/packp/ulreq.go
index 6ec5b96..be68b26 100644
--- a/plumbing/protocol/packp/ulreq.go
+++ b/plumbing/protocol/packp/ulreq.go
@@ -4,6 +4,7 @@ import (
"time"
"gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
)
// UploadRequest values represent the information transmitted on a
@@ -11,7 +12,7 @@ import (
// safe, use the New function instead.
// This is a low level type, use UploadPackRequest instead.
type UploadRequest struct {
- Capabilities *Capabilities
+ Capabilities *capability.List
Wants []plumbing.Hash
Shallows []plumbing.Hash
Depth Depth
@@ -46,7 +47,7 @@ func (d DepthReference) isDepth() {}
// wanted hash.
func NewUploadRequest() *UploadRequest {
return &UploadRequest{
- Capabilities: NewCapabilities(),
+ Capabilities: capability.NewList(),
Wants: []plumbing.Hash{},
Shallows: []plumbing.Hash{},
Depth: DepthCommits(0),
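
UploadRequest now also carries a *capability.List. A minimal sketch of building a request with the new type (the want hash is illustrative only):

    package main

    import (
    	"fmt"

    	"gopkg.in/src-d/go-git.v4/plumbing"
    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
    	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
    )

    func main() {
    	req := packp.NewUploadRequest()
    	req.Wants = append(req.Wants,
    		plumbing.NewHash("1111111111111111111111111111111111111111"))

    	// Add now validates the capability, so the error is worth checking.
    	if err := req.Capabilities.Add(capability.OFSDelta); err != nil {
    		fmt.Println("add capability:", err)
    		return
    	}

    	fmt.Println(req.Capabilities.Supports(capability.OFSDelta)) // true
    }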
diff --git a/plumbing/protocol/packp/ulreq_decode.go b/plumbing/protocol/packp/ulreq_decode.go
index 0124cd0..812af5b 100644
--- a/plumbing/protocol/packp/ulreq_decode.go
+++ b/plumbing/protocol/packp/ulreq_decode.go
@@ -115,10 +115,8 @@ func (d *ulReqDecoder) decodeCaps() stateFn {
}
d.line = bytes.TrimPrefix(d.line, sp)
-
- for _, c := range bytes.Split(d.line, sp) {
- name, values := readCapability(c)
- d.data.Capabilities.Add(name, values...)
+ if err := d.data.Capabilities.Decode(d.line); err != nil {
+ d.error("invalid capabilities: %s", err)
}
return d.decodeOtherWants
diff --git a/plumbing/protocol/packp/ulreq_decode_test.go b/plumbing/protocol/packp/ulreq_decode_test.go
index eb12c90..e7d9d7c6 100644
--- a/plumbing/protocol/packp/ulreq_decode_test.go
+++ b/plumbing/protocol/packp/ulreq_decode_test.go
@@ -10,6 +10,7 @@ import (
"gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
)
type UlReqDecodeSuite struct{}
@@ -87,8 +88,8 @@ func (s *UlReqDecodeSuite) TestWantWithCapabilities(c *C) {
c.Assert(ur.Wants, DeepEquals, []plumbing.Hash{
plumbing.NewHash("1111111111111111111111111111111111111111")})
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.OFSDelta), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.MultiACK), Equals, true)
}
func (s *UlReqDecodeSuite) TestManyWantsNoCapabilities(c *C) {
@@ -168,8 +169,8 @@ func (s *UlReqDecodeSuite) TestManyWantsWithCapabilities(c *C) {
sort.Sort(byHash(expected))
c.Assert(ur.Wants, DeepEquals, expected)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.OFSDelta), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.MultiACK), Equals, true)
}
func (s *UlReqDecodeSuite) TestSingleShallowSingleWant(c *C) {
@@ -189,8 +190,8 @@ func (s *UlReqDecodeSuite) TestSingleShallowSingleWant(c *C) {
}
c.Assert(ur.Wants, DeepEquals, expectedWants)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.OFSDelta), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.MultiACK), Equals, true)
c.Assert(ur.Shallows, DeepEquals, expectedShallows)
}
@@ -220,8 +221,8 @@ func (s *UlReqDecodeSuite) TestSingleShallowManyWants(c *C) {
sort.Sort(byHash(ur.Wants))
c.Assert(ur.Wants, DeepEquals, expectedWants)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.OFSDelta), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.MultiACK), Equals, true)
c.Assert(ur.Shallows, DeepEquals, expectedShallows)
}
@@ -250,8 +251,8 @@ func (s *UlReqDecodeSuite) TestManyShallowSingleWant(c *C) {
sort.Sort(byHash(expectedShallows))
c.Assert(ur.Wants, DeepEquals, expectedWants)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.OFSDelta), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.MultiACK), Equals, true)
sort.Sort(byHash(ur.Shallows))
c.Assert(ur.Shallows, DeepEquals, expectedShallows)
@@ -289,8 +290,8 @@ func (s *UlReqDecodeSuite) TestManyShallowManyWants(c *C) {
sort.Sort(byHash(ur.Wants))
c.Assert(ur.Wants, DeepEquals, expectedWants)
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.OFSDelta), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.MultiACK), Equals, true)
sort.Sort(byHash(ur.Shallows))
c.Assert(ur.Shallows, DeepEquals, expectedShallows)
@@ -500,9 +501,8 @@ func (s *UlReqDecodeSuite) TestAll(c *C) {
sort.Sort(byHash(expectedWants))
sort.Sort(byHash(ur.Wants))
c.Assert(ur.Wants, DeepEquals, expectedWants)
-
- c.Assert(ur.Capabilities.Supports("ofs-delta"), Equals, true)
- c.Assert(ur.Capabilities.Supports("multi_ack"), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.OFSDelta), Equals, true)
+ c.Assert(ur.Capabilities.Supports(capability.MultiACK), Equals, true)
expectedShallows := []plumbing.Hash{
plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
diff --git a/plumbing/protocol/packp/ulreq_encode.go b/plumbing/protocol/packp/ulreq_encode.go
index b422a5f..b2ca491 100644
--- a/plumbing/protocol/packp/ulreq_encode.go
+++ b/plumbing/protocol/packp/ulreq_encode.go
@@ -63,7 +63,6 @@ func (e *ulReqEncoder) encodeFirstWant() stateFn {
if e.data.Capabilities.IsEmpty() {
err = e.pe.Encodef("want %s\n", e.sortedWants[0])
} else {
- e.data.Capabilities.Sort()
err = e.pe.Encodef(
"want %s %s\n",
e.sortedWants[0],
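Note: the explicit Capabilities.Sort() call before encoding the first want line is gone; the encoded order now presumably follows the order in which capabilities were added, which is why the tests below add them in the alphabetical order they expect on the wire. A small sketch of that behaviour, assuming capability.List implements fmt.Stringer as its use in Encodef suggests:

package main

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
)

func main() {
	caps := capability.NewList()

	// Add capabilities in the order they should appear on the wire;
	// no sorting is applied when the list is rendered.
	caps.Add(capability.MultiACK)
	caps.Add(capability.OFSDelta)
	caps.Add(capability.SymRef, "HEAD:/refs/heads/master")

	fmt.Println(caps) // expected: multi_ack ofs-delta symref=HEAD:/refs/heads/master
}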
diff --git a/plumbing/protocol/packp/ulreq_encode_test.go b/plumbing/protocol/packp/ulreq_encode_test.go
index 1eb3175..3b3b6c2 100644
--- a/plumbing/protocol/packp/ulreq_encode_test.go
+++ b/plumbing/protocol/packp/ulreq_encode_test.go
@@ -8,6 +8,7 @@ import (
"gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
. "gopkg.in/check.v1"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
)
type UlReqEncodeSuite struct{}
@@ -59,14 +60,14 @@ func (s *UlReqEncodeSuite) TestOneWant(c *C) {
func (s *UlReqEncodeSuite) TestOneWantWithCapabilities(c *C) {
ur := NewUploadRequest()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
- ur.Capabilities.Add("multi_ack")
- ur.Capabilities.Add("thin-pack")
- ur.Capabilities.Add("side-band")
- ur.Capabilities.Add("ofs-delta")
+ ur.Capabilities.Add(capability.MultiACK)
+ ur.Capabilities.Add(capability.OFSDelta)
+ ur.Capabilities.Add(capability.Sideband)
+ ur.Capabilities.Add(capability.SymRef, "HEAD:/refs/heads/master")
+ ur.Capabilities.Add(capability.ThinPack)
expected := []string{
- "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band sysref=HEAD:/refs/heads/master thin-pack\n",
+ "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band symref=HEAD:/refs/heads/master thin-pack\n",
pktline.FlushString,
}
@@ -101,14 +102,14 @@ func (s *UlReqEncodeSuite) TestWantsWithCapabilities(c *C) {
ur.Wants = append(ur.Wants, plumbing.NewHash("2222222222222222222222222222222222222222"))
ur.Wants = append(ur.Wants, plumbing.NewHash("5555555555555555555555555555555555555555"))
- ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
- ur.Capabilities.Add("multi_ack")
- ur.Capabilities.Add("thin-pack")
- ur.Capabilities.Add("side-band")
- ur.Capabilities.Add("ofs-delta")
+ ur.Capabilities.Add(capability.MultiACK)
+ ur.Capabilities.Add(capability.OFSDelta)
+ ur.Capabilities.Add(capability.Sideband)
+ ur.Capabilities.Add(capability.SymRef, "HEAD:/refs/heads/master")
+ ur.Capabilities.Add(capability.ThinPack)
expected := []string{
- "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band sysref=HEAD:/refs/heads/master thin-pack\n",
+ "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band symref=HEAD:/refs/heads/master thin-pack\n",
"want 2222222222222222222222222222222222222222\n",
"want 3333333333333333333333333333333333333333\n",
"want 4444444444444444444444444444444444444444\n",
@@ -122,7 +123,7 @@ func (s *UlReqEncodeSuite) TestWantsWithCapabilities(c *C) {
func (s *UlReqEncodeSuite) TestShallow(c *C) {
ur := NewUploadRequest()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Capabilities.Add("multi_ack")
+ ur.Capabilities.Add(capability.MultiACK)
ur.Shallows = append(ur.Shallows, plumbing.NewHash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))
expected := []string{
@@ -137,7 +138,7 @@ func (s *UlReqEncodeSuite) TestShallow(c *C) {
func (s *UlReqEncodeSuite) TestManyShallows(c *C) {
ur := NewUploadRequest()
ur.Wants = append(ur.Wants, plumbing.NewHash("1111111111111111111111111111111111111111"))
- ur.Capabilities.Add("multi_ack")
+ ur.Capabilities.Add(capability.MultiACK)
ur.Shallows = append(ur.Shallows, plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"))
ur.Shallows = append(ur.Shallows, plumbing.NewHash("dddddddddddddddddddddddddddddddddddddddd"))
ur.Shallows = append(ur.Shallows, plumbing.NewHash("cccccccccccccccccccccccccccccccccccccccc"))
@@ -225,11 +226,11 @@ func (s *UlReqEncodeSuite) TestAll(c *C) {
ur.Wants = append(ur.Wants, plumbing.NewHash("2222222222222222222222222222222222222222"))
ur.Wants = append(ur.Wants, plumbing.NewHash("5555555555555555555555555555555555555555"))
- ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
- ur.Capabilities.Add("multi_ack")
- ur.Capabilities.Add("thin-pack")
- ur.Capabilities.Add("side-band")
- ur.Capabilities.Add("ofs-delta")
+ ur.Capabilities.Add(capability.MultiACK)
+ ur.Capabilities.Add(capability.OFSDelta)
+ ur.Capabilities.Add(capability.Sideband)
+ ur.Capabilities.Add(capability.SymRef, "HEAD:/refs/heads/master")
+ ur.Capabilities.Add(capability.ThinPack)
ur.Shallows = append(ur.Shallows, plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"))
ur.Shallows = append(ur.Shallows, plumbing.NewHash("dddddddddddddddddddddddddddddddddddddddd"))
@@ -240,7 +241,7 @@ func (s *UlReqEncodeSuite) TestAll(c *C) {
ur.Depth = DepthSince(since)
expected := []string{
- "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band sysref=HEAD:/refs/heads/master thin-pack\n",
+ "want 1111111111111111111111111111111111111111 multi_ack ofs-delta side-band symref=HEAD:/refs/heads/master thin-pack\n",
"want 2222222222222222222222222222222222222222\n",
"want 3333333333333333333333333333333333333333\n",
"want 4444444444444444444444444444444444444444\n",
diff --git a/plumbing/protocol/packp/ulreq_test.go b/plumbing/protocol/packp/ulreq_test.go
index be02f9d..5e9e978 100644
--- a/plumbing/protocol/packp/ulreq_test.go
+++ b/plumbing/protocol/packp/ulreq_test.go
@@ -8,6 +8,7 @@ import (
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/format/pktline"
+ "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
)
func ExampleUlReqEncoder_Encode() {
@@ -20,8 +21,8 @@ func ExampleUlReqEncoder_Encode() {
ur.Wants = append(ur.Wants, plumbing.NewHash("2222222222222222222222222222222222222222"))
// And some capabilities you will like the server to use
- ur.Capabilities.Add("sysref", "HEAD:/refs/heads/master")
- ur.Capabilities.Add("ofs-delta")
+ ur.Capabilities.Add(capability.OFSDelta)
+ ur.Capabilities.Add(capability.SymRef, "HEAD:/refs/heads/master")
// Add a couple of shallows
ur.Shallows = append(ur.Shallows, plumbing.NewHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"))
@@ -36,7 +37,7 @@ func ExampleUlReqEncoder_Encode() {
// ...and encode the upload-request to it.
_ = e.Encode(ur) // ignoring errors for brevity
// Output:
- // 005bwant 1111111111111111111111111111111111111111 ofs-delta sysref=HEAD:/refs/heads/master
+ // 005bwant 1111111111111111111111111111111111111111 ofs-delta symref=HEAD:/refs/heads/master
// 0032want 2222222222222222222222222222222222222222
// 0032want 3333333333333333333333333333333333333333
// 0035shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
@@ -48,7 +49,7 @@ func ExampleUlReqEncoder_Encode() {
func ExampleUlReqDecoder_Decode() {
// Here is a raw advertised-ref message.
raw := "" +
- "005bwant 1111111111111111111111111111111111111111 ofs-delta sysref=HEAD:/refs/heads/master\n" +
+ "005bwant 1111111111111111111111111111111111111111 ofs-delta symref=HEAD:/refs/heads/master\n" +
"0032want 2222222222222222222222222222222222222222\n" +
"0032want 3333333333333333333333333333333333333333\n" +
"0035shallow aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n" +
@@ -79,7 +80,7 @@ func ExampleUlReqDecoder_Decode() {
fmt.Println("depth =", string(depth))
}
// Output:
- // capabilities = ofs-delta sysref=HEAD:/refs/heads/master
+ // capabilities = ofs-delta symref=HEAD:/refs/heads/master
// wants = [1111111111111111111111111111111111111111 2222222222222222222222222222222222222222 3333333333333333333333333333333333333333]
// shallows = [aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb]
// depth = 2015-01-02 03:04:05 +0000 UTC
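Note: the decode example above prints the capabilities line verbatim; to read the value carried by a capability such as symref after decoding, the capability package presumably exposes Get alongside Supports. A minimal, hypothetical sketch (Get and its return shape are assumptions, not part of this diff):

package main

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability"
)

func main() {
	caps := capability.NewList()
	if err := caps.Decode([]byte("ofs-delta symref=HEAD:/refs/heads/master")); err != nil {
		fmt.Println("invalid capabilities:", err)
		return
	}

	// Get is assumed to return the values recorded for the capability.
	for _, v := range caps.Get(capability.SymRef) {
		fmt.Println(v) // HEAD:/refs/heads/master
	}
}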