| | | |
|---|---|---|
| author | Alberto Cortés <alcortesm@gmail.com> | 2016-10-26 17:56:26 +0200 |
| committer | Máximo Cuadros <mcuadros@gmail.com> | 2016-10-26 15:56:26 +0000 |
| commit | 73fa9ef25a8af9c8337a4cf34a67cfe208f1a7c5 (patch) | |
| tree | 66886d9b47e373b748c1bceafe1885e6f47868dd /formats/packp/advrefs/encoder_test.go | |
| parent | f3ab3a6c73015b5ae9b2a4756dc646e1211cedb9 (diff) | |
| download | go-git-73fa9ef25a8af9c8337a4cf34a67cfe208f1a7c5.tar.gz | |
Use advrefs in gituploadpackinfo (#92)
* add advrefs encoder and parser
* modify advrefs encoder to resemble json encoder
* turn advrefs parser into a decoder
* clean code
* improve documentation
* improve documentation
* clean code
* upgrade to new pktline.Add and add Flush const to ease integration
* gometalinter
* Use packp/advrefs for GitUploadPackInfo parsing
- GitUploadPackInfo now uses packp/advrefs instead of parsing the
message by itself.
- Capabilities has been moved from clients/common to packp to avoid a
circular import.
- Cleaning of advrefs_test code.
- Add support for prefix encoding and decoding in advrefs.
* clean advrefs test code
* clean advrefs test code
* clean advrefs test code
* gometalinter
* add pktline encoder
* change pktline.EncodeFlush to pktline.Flush
* make scanner tests use the encoder instead of Pktlines
* check errors on flush and clean constants
* substitute the PktLines type with a pktline.Encoder
* use pktline.Encoder in all go-git (a minimal usage sketch follows this commit message)
* add example of pktline.Encodef()
* add package overview
* documentation
* support symbolic links other than HEAD
* simplify decoding of shallows
* packp: address mcuadros' review comments
- all abbreviations removed (by visual inspection, some may remain)
- all empty maps are initialized using make
- simplify readRef with a switch
- make decodeShallow malformed error more verbose
- add pktline.Encoder.encodeLine
- remove infamous panic in checkPayloadLength by refactoring out
the whole function
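
Several of the bullets above concern the new pktline.Encoder that replaces the old PktLines type. The sketch below is only an illustration of the API exercised by the test file in this commit (pktline.NewEncoder, Encode, and the Flush payload constant, all taken from encoder_test.go); the package main wrapper, the fmt output, and the sample payload are added here for demonstration and are not part of the commit.

```go
package main

import (
	"bytes"
	"fmt"

	"gopkg.in/src-d/go-git.v4/formats/packp/pktline"
)

func main() {
	// Encode one payload followed by a flush-pkt, the same pattern the
	// pktlines helper in encoder_test.go uses to build expected wire output.
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)
	if err := e.Encode(
		[]byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\n"),
		pktline.Flush,
	); err != nil {
		fmt.Println("encode error:", err)
		return
	}

	// Each payload is written as a pkt-line: a 4-digit hex length prefix
	// (payload length plus the 4 bytes of the prefix itself) followed by
	// the payload; the Flush sentinel is written as "0000".
	fmt.Printf("%q\n", buf.String())
}
```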
Diffstat (limited to 'formats/packp/advrefs/encoder_test.go')
-rw-r--r-- | formats/packp/advrefs/encoder_test.go | 249 |
1 file changed, 249 insertions(+), 0 deletions(-)
```diff
diff --git a/formats/packp/advrefs/encoder_test.go b/formats/packp/advrefs/encoder_test.go
new file mode 100644
index 0000000..8fb475b
--- /dev/null
+++ b/formats/packp/advrefs/encoder_test.go
@@ -0,0 +1,249 @@
+package advrefs_test
+
+import (
+    "bytes"
+    "strings"
+
+    "gopkg.in/src-d/go-git.v4/core"
+    "gopkg.in/src-d/go-git.v4/formats/packp"
+    "gopkg.in/src-d/go-git.v4/formats/packp/advrefs"
+    "gopkg.in/src-d/go-git.v4/formats/packp/pktline"
+
+    . "gopkg.in/check.v1"
+)
+
+type SuiteEncoder struct{}
+
+var _ = Suite(&SuiteEncoder{})
+
+// returns a byte slice with the pkt-lines for the given payloads.
+func pktlines(c *C, payloads ...[]byte) []byte {
+    var buf bytes.Buffer
+    e := pktline.NewEncoder(&buf)
+    err := e.Encode(payloads...)
+    c.Assert(err, IsNil, Commentf("building pktlines for %v\n", payloads))
+
+    return buf.Bytes()
+}
+
+func testEncode(c *C, input *advrefs.AdvRefs, expected []byte) {
+    var buf bytes.Buffer
+    e := advrefs.NewEncoder(&buf)
+    err := e.Encode(input)
+    c.Assert(err, IsNil)
+    obtained := buf.Bytes()
+
+    comment := Commentf("\nobtained = %s\nexpected = %s\n", string(obtained), string(expected))
+
+    c.Assert(obtained, DeepEquals, expected, comment)
+}
+
+func (s *SuiteEncoder) TestZeroValue(c *C) {
+    ar := &advrefs.AdvRefs{}
+
+    expected := pktlines(c,
+        []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
+        pktline.Flush,
+    )
+
+    testEncode(c, ar, expected)
+}
+
+func (s *SuiteEncoder) TestHead(c *C) {
+    hash := core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
+    ar := &advrefs.AdvRefs{
+        Head: &hash,
+    }
+
+    expected := pktlines(c,
+        []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00\n"),
+        pktline.Flush,
+    )
+
+    testEncode(c, ar, expected)
+}
+
+func (s *SuiteEncoder) TestCapsNoHead(c *C) {
+    capabilities := packp.NewCapabilities()
+    capabilities.Add("symref", "HEAD:/refs/heads/master")
+    capabilities.Add("ofs-delta")
+    capabilities.Add("multi_ack")
+    ar := &advrefs.AdvRefs{
+        Capabilities: capabilities,
+    }
+
+    expected := pktlines(c,
+        []byte("0000000000000000000000000000000000000000 capabilities^{}\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
+        pktline.Flush,
+    )
+
+    testEncode(c, ar, expected)
+}
+
+func (s *SuiteEncoder) TestCapsWithHead(c *C) {
+    hash := core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
+    capabilities := packp.NewCapabilities()
+    capabilities.Add("symref", "HEAD:/refs/heads/master")
+    capabilities.Add("ofs-delta")
+    capabilities.Add("multi_ack")
+    ar := &advrefs.AdvRefs{
+        Head:         &hash,
+        Capabilities: capabilities,
+    }
+
+    expected := pktlines(c,
+        []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
+        pktline.Flush,
+    )
+
+    testEncode(c, ar, expected)
+}
+
+func (s *SuiteEncoder) TestRefs(c *C) {
+    references := map[string]core.Hash{
+        "refs/heads/master":      core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
+        "refs/tags/v2.6.12-tree": core.NewHash("1111111111111111111111111111111111111111"),
+        "refs/tags/v2.7.13-tree": core.NewHash("3333333333333333333333333333333333333333"),
+        "refs/tags/v2.6.13-tree": core.NewHash("2222222222222222222222222222222222222222"),
+        "refs/tags/v2.6.11-tree": core.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
+    }
+    ar := &advrefs.AdvRefs{
+        References: references,
+    }
+
+    expected := pktlines(c,
+        []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
+        []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
+        []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
+        []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
+        []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
+        []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
+        pktline.Flush,
+    )
+
+    testEncode(c, ar, expected)
+}
+
+func (s *SuiteEncoder) TestPeeled(c *C) {
+    references := map[string]core.Hash{
+        "refs/heads/master":      core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
+        "refs/tags/v2.6.12-tree": core.NewHash("1111111111111111111111111111111111111111"),
+        "refs/tags/v2.7.13-tree": core.NewHash("3333333333333333333333333333333333333333"),
+        "refs/tags/v2.6.13-tree": core.NewHash("2222222222222222222222222222222222222222"),
+        "refs/tags/v2.6.11-tree": core.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
+    }
+    peeled := map[string]core.Hash{
+        "refs/tags/v2.7.13-tree": core.NewHash("4444444444444444444444444444444444444444"),
+        "refs/tags/v2.6.12-tree": core.NewHash("5555555555555555555555555555555555555555"),
+    }
+    ar := &advrefs.AdvRefs{
+        References: references,
+        Peeled:     peeled,
+    }
+
+    expected := pktlines(c,
+        []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
+        []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
+        []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
+        []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
+        []byte("5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n"),
+        []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
+        []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
+        []byte("4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n"),
+        pktline.Flush,
+    )
+
+    testEncode(c, ar, expected)
+}
+
+func (s *SuiteEncoder) TestShallow(c *C) {
+    shallows := []core.Hash{
+        core.NewHash("1111111111111111111111111111111111111111"),
+        core.NewHash("4444444444444444444444444444444444444444"),
+        core.NewHash("3333333333333333333333333333333333333333"),
+        core.NewHash("2222222222222222222222222222222222222222"),
+    }
+    ar := &advrefs.AdvRefs{
+        Shallows: shallows,
+    }
+
+    expected := pktlines(c,
+        []byte("0000000000000000000000000000000000000000 capabilities^{}\x00\n"),
+        []byte("shallow 1111111111111111111111111111111111111111\n"),
+        []byte("shallow 2222222222222222222222222222222222222222\n"),
+        []byte("shallow 3333333333333333333333333333333333333333\n"),
+        []byte("shallow 4444444444444444444444444444444444444444\n"),
+        pktline.Flush,
+    )
+
+    testEncode(c, ar, expected)
+}
+
+func (s *SuiteEncoder) TestAll(c *C) {
+    hash := core.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
+
+    capabilities := packp.NewCapabilities()
+    capabilities.Add("symref", "HEAD:/refs/heads/master")
+    capabilities.Add("ofs-delta")
+    capabilities.Add("multi_ack")
+
+    references := map[string]core.Hash{
+        "refs/heads/master":      core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
+        "refs/tags/v2.6.12-tree": core.NewHash("1111111111111111111111111111111111111111"),
+        "refs/tags/v2.7.13-tree": core.NewHash("3333333333333333333333333333333333333333"),
+        "refs/tags/v2.6.13-tree": core.NewHash("2222222222222222222222222222222222222222"),
+        "refs/tags/v2.6.11-tree": core.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c"),
+    }
+
+    peeled := map[string]core.Hash{
+        "refs/tags/v2.7.13-tree": core.NewHash("4444444444444444444444444444444444444444"),
+        "refs/tags/v2.6.12-tree": core.NewHash("5555555555555555555555555555555555555555"),
+    }
+
+    shallows := []core.Hash{
+        core.NewHash("1111111111111111111111111111111111111111"),
+        core.NewHash("4444444444444444444444444444444444444444"),
+        core.NewHash("3333333333333333333333333333333333333333"),
+        core.NewHash("2222222222222222222222222222222222222222"),
+    }
+
+    ar := &advrefs.AdvRefs{
+        Head:         &hash,
+        Capabilities: capabilities,
+        References:   references,
+        Peeled:       peeled,
+        Shallows:     shallows,
+    }
+
+    expected := pktlines(c,
+        []byte("6ecf0ef2c2dffb796033e5a02219af86ec6584e5 HEAD\x00multi_ack ofs-delta symref=HEAD:/refs/heads/master\n"),
+        []byte("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7 refs/heads/master\n"),
+        []byte("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11-tree\n"),
+        []byte("1111111111111111111111111111111111111111 refs/tags/v2.6.12-tree\n"),
+        []byte("5555555555555555555555555555555555555555 refs/tags/v2.6.12-tree^{}\n"),
+        []byte("2222222222222222222222222222222222222222 refs/tags/v2.6.13-tree\n"),
+        []byte("3333333333333333333333333333333333333333 refs/tags/v2.7.13-tree\n"),
+        []byte("4444444444444444444444444444444444444444 refs/tags/v2.7.13-tree^{}\n"),
+        []byte("shallow 1111111111111111111111111111111111111111\n"),
+        []byte("shallow 2222222222222222222222222222222222222222\n"),
+        []byte("shallow 3333333333333333333333333333333333333333\n"),
+        []byte("shallow 4444444444444444444444444444444444444444\n"),
+        pktline.Flush,
+    )
+
+    testEncode(c, ar, expected)
+}
+
+func (s *SuiteEncoder) TestErrorTooLong(c *C) {
+    references := map[string]core.Hash{
+        strings.Repeat("a", pktline.MaxPayloadSize): core.NewHash("a6930aaee06755d1bdcfd943fbf614e4d92bb0c7"),
+    }
+    ar := &advrefs.AdvRefs{
+        References: references,
+    }
+
+    var buf bytes.Buffer
+    e := advrefs.NewEncoder(&buf)
+    err := e.Encode(ar)
+    c.Assert(err, ErrorMatches, ".*payload is too long.*")
+}
```