path: root/plumbing/format
author     Antonio Jesus Navarro Perez <antnavper@gmail.com>  2017-05-10 16:47:15 +0200
committer  Antonio Jesus Navarro Perez <antnavper@gmail.com>  2017-05-23 11:05:14 +0200
commit     2f293f4a5214ccba5bdf0b82ff8b62ed39144078 (patch)
tree       8942869ff31dbf3aa950c1d3cddb3a0de8ca0aa3 /plumbing/format
parent     2ff77a8d93529cefdca922dbed89d4b1cd0ee8e5 (diff)
download   go-git-2f293f4a5214ccba5bdf0b82ff8b62ed39144078.tar.gz
format/diff: unified diff encoder and public API
- Added Patch interface
- Added a Unified Diff encoder from Patches
- Added Change method to generate Patches
- Added Changes method to generate Patches
- Added Tree method to generate Patches
- Added Commit method to generate Patches
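
As a quick orientation, a minimal sketch (not part of the commit) of how the new public API could be driven. msgOnlyPatch is a hypothetical stand-in for a real Patch implementation; the Change/Changes/Tree/Commit helpers listed above live outside plumbing/format and are not shown in this diff.

package main

import (
	"os"

	"gopkg.in/src-d/go-git.v4/plumbing/format/diff"
)

// msgOnlyPatch is a hypothetical, minimal implementation of the new
// diff.Patch interface: it carries only a message and no file patches.
type msgOnlyPatch struct{ msg string }

func (p msgOnlyPatch) FilePatches() []diff.FilePatch { return nil }
func (p msgOnlyPatch) Message() string               { return p.msg }

func main() {
	// Write a unified diff to stdout; DefaultContextLines (3) matches git's
	// default amount of context around each change.
	e := diff.NewUnifiedEncoder(os.Stdout, diff.DefaultContextLines)
	if err := e.Encode(msgOnlyPatch{msg: "example patch"}); err != nil {
		panic(err)
	}
}

Encoding a patch with no file patches simply writes the message, the same behaviour exercised by printMessage in unified_encoder.go below.
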
Diffstat (limited to 'plumbing/format')
-rw-r--r--  plumbing/format/diff/patch.go                  58
-rw-r--r--  plumbing/format/diff/unified_encoder.go       355
-rw-r--r--  plumbing/format/diff/unified_encoder_test.go  829
3 files changed, 1242 insertions, 0 deletions
diff --git a/plumbing/format/diff/patch.go b/plumbing/format/diff/patch.go
new file mode 100644
index 0000000..7c6cf4a
--- /dev/null
+++ b/plumbing/format/diff/patch.go
@@ -0,0 +1,58 @@
+package diff
+
+import (
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/filemode"
+)
+
+// Operation defines the operation of a diff item.
+type Operation int
+
+const (
+ // Equal item represents an equal diff.
+ Equal Operation = iota
+ // Add item represents an insert diff.
+ Add
+ // Delete item represents a delete diff.
+ Delete
+)
+
+// Patch represents a collection of steps to transform several files.
+type Patch interface {
+ // FilePatches returns a slice of patches per file.
+ FilePatches() []FilePatch
+ // Message returns an optional message that can be printed at the top of
+ // the Patch representation.
+ Message() string
+}
+
+// FilePatch represents the necessary steps to transform one file to another.
+type FilePatch interface {
+ // IsBinary returns true if this patch is representing a binary file.
+ IsBinary() bool
+ // Files returns the from and to Files, with all the necessary metadata
+ // about them. If the patch creates a new file, "from" will be nil.
+ // If the patch deletes a file, "to" will be nil.
+ Files() (from, to File)
+ // Chunks returns a slice of ordered changes to transform "from" File to
+ // "to" File. If the file is a binary one, Chunks will be empty.
+ Chunks() []Chunk
+}
+
+// File contains all the file metadata necessary to print some patch formats.
+type File interface {
+ // Hash returns the File Hash.
+ Hash() plumbing.Hash
+ // Mode returns the FileMode.
+ Mode() filemode.FileMode
+ // Path returns the complete Path to the file, including the filename.
+ Path() string
+}
+
+// Chunk represents a portion of the transformation from one file to another.
+type Chunk interface {
+ // Content returns the content of this portion of the file.
+ Content() string
+ // Type returns the Operation of this Chunk.
+ Type() Operation
+}
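
A sketch (not part of this change) of how a consumer might walk the interfaces defined in patch.go: each FilePatch is either a binary change or an ordered list of Equal/Add/Delete Chunks that turn the "from" File into the "to" File. The package name diffwalk and the describe function are hypothetical; the test doubles in unified_encoder_test.go further below show the producer side of the same interfaces.

package diffwalk

import (
	"fmt"

	"gopkg.in/src-d/go-git.v4/plumbing/format/diff"
)

// describe prints a rough summary of a Patch.
func describe(p diff.Patch) {
	for _, fp := range p.FilePatches() {
		from, to := fp.Files()
		switch {
		case from == nil:
			fmt.Println("new file:", to.Path())
		case to == nil:
			fmt.Println("deleted file:", from.Path())
		default:
			fmt.Println("modified:", from.Path(), "->", to.Path())
		}

		if fp.IsBinary() {
			fmt.Println("  binary file, no chunks")
			continue
		}

		for _, c := range fp.Chunks() {
			switch c.Type() {
			case diff.Add:
				fmt.Printf("  + %q\n", c.Content())
			case diff.Delete:
				fmt.Printf("  - %q\n", c.Content())
			case diff.Equal:
				fmt.Printf("    %q\n", c.Content())
			}
		}
	}
}
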
diff --git a/plumbing/format/diff/unified_encoder.go b/plumbing/format/diff/unified_encoder.go
new file mode 100644
index 0000000..a4ff7ab
--- /dev/null
+++ b/plumbing/format/diff/unified_encoder.go
@@ -0,0 +1,355 @@
+package diff
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "strings"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+)
+
+const (
+ diffInit = "diff --git a/%s b/%s\n"
+
+ chunkStart = "@@ -"
+ chunkMiddle = " +"
+ chunkEnd = " @@%s\n"
+ chunkCount = "%d,%d"
+
+ noFilePath = "/dev/null"
+ aDir = "a/"
+ bDir = "b/"
+
+ fPath = "--- %s\n"
+ tPath = "+++ %s\n"
+ binary = "Binary files %s and %s differ\n"
+
+ addLine = "+%s\n"
+ deleteLine = "-%s\n"
+ equalLine = " %s\n"
+
+ oldMode = "old mode %o\n"
+ newMode = "new mode %o\n"
+ deletedFileMode = "deleted file mode %o\n"
+ newFileMode = "new file mode %o\n"
+
+ renameFrom = "from"
+ renameTo = "to"
+ renameFileMode = "rename %s %s\n"
+
+ indexAndMode = "index %s..%s %o\n"
+ indexNoMode = "index %s..%s\n"
+
+ DefaultContextLines = 3
+)
+
+var ErrBothFilesEmpty = errors.New("both files are empty")
+
+// UnifiedEncoder encodes a unified diff into the provided Writer.
+// There are some unsupported features:
+// - Similarity index for renames
+// - Short hash representation
+type UnifiedEncoder struct {
+ io.Writer
+
+ // ctxLines is the count of unchanged lines that will appear
+ // surrounding a change.
+ ctxLines int
+
+ buf bytes.Buffer
+}
+
+func NewUnifiedEncoder(w io.Writer, ctxLines int) *UnifiedEncoder {
+ return &UnifiedEncoder{ctxLines: ctxLines, Writer: w}
+}
+
+func (e *UnifiedEncoder) Encode(patch Patch) error {
+ e.printMessage(patch.Message())
+
+ if err := e.encodeFilePatch(patch.FilePatches()); err != nil {
+ return err
+ }
+
+ _, err := e.buf.WriteTo(e)
+
+ return err
+}
+
+func (e *UnifiedEncoder) encodeFilePatch(filePatches []FilePatch) error {
+ for _, p := range filePatches {
+ f, t := p.Files()
+ if err := e.header(f, t, p.IsBinary()); err != nil {
+ return err
+ }
+
+ g := newHunksGenerator(p.Chunks(), e.ctxLines)
+ for _, c := range g.Generate() {
+ c.WriteTo(&e.buf)
+ }
+ }
+
+ return nil
+}
+
+func (e *UnifiedEncoder) printMessage(message string) {
+ isEmpty := message == ""
+ hasSuffix := strings.HasSuffix(message, "\n")
+ if !isEmpty && !hasSuffix {
+ message = message + "\n"
+ }
+
+ e.buf.WriteString(message)
+}
+
+func (e *UnifiedEncoder) header(from, to File, isBinary bool) error {
+ switch {
+ case from == nil && to == nil:
+ return ErrBothFilesEmpty
+ case from != nil && to != nil:
+ hashEquals := from.Hash() == to.Hash()
+
+ fmt.Fprintf(&e.buf, diffInit, from.Path(), to.Path())
+
+ if from.Mode() != to.Mode() {
+ fmt.Fprintf(&e.buf, oldMode+newMode, from.Mode(), to.Mode())
+ }
+
+ if from.Path() != to.Path() {
+ fmt.Fprintf(&e.buf,
+ renameFileMode+renameFileMode,
+ renameFrom, from.Path(), renameTo, to.Path())
+ }
+
+ if from.Mode() != to.Mode() && !hashEquals {
+ fmt.Fprintf(&e.buf, indexNoMode, from.Hash(), to.Hash())
+ } else if !hashEquals {
+ fmt.Fprintf(&e.buf, indexAndMode, from.Hash(), to.Hash(), from.Mode())
+ }
+
+ if !hashEquals {
+ e.pathLines(isBinary, aDir+from.Path(), bDir+to.Path())
+ }
+ case from == nil:
+ fmt.Fprintf(&e.buf, diffInit, to.Path(), to.Path())
+ fmt.Fprintf(&e.buf, newFileMode, to.Mode())
+ fmt.Fprintf(&e.buf, indexNoMode, plumbing.ZeroHash, to.Hash())
+ e.pathLines(isBinary, noFilePath, bDir+to.Path())
+ case to == nil:
+ fmt.Fprintf(&e.buf, diffInit, from.Path(), from.Path())
+ fmt.Fprintf(&e.buf, deletedFileMode, from.Mode())
+ fmt.Fprintf(&e.buf, indexNoMode, from.Hash(), plumbing.ZeroHash)
+ e.pathLines(isBinary, aDir+from.Path(), noFilePath)
+ }
+
+ return nil
+}
+
+func (e *UnifiedEncoder) pathLines(isBinary bool, fromPath, toPath string) {
+ format := fPath + tPath
+ if isBinary {
+ format = binary
+ }
+
+ fmt.Fprintf(&e.buf, format, fromPath, toPath)
+}
+
+type hunksGenerator struct {
+ fromLine, toLine int
+ ctxLines int
+ chunks []Chunk
+ current *hunk
+ hunks []*hunk
+ beforeContext, afterContext []string
+}
+
+func newHunksGenerator(chunks []Chunk, ctxLines int) *hunksGenerator {
+ return &hunksGenerator{
+ chunks: chunks,
+ ctxLines: ctxLines,
+ }
+}
+
+func (c *hunksGenerator) Generate() []*hunk {
+ for i, chunk := range c.chunks {
+ ls := splitLines(chunk.Content())
+ lsLen := len(ls)
+
+ switch chunk.Type() {
+ case Equal:
+ c.fromLine += lsLen
+ c.toLine += lsLen
+ c.processEqualsLines(ls, i)
+ case Delete:
+ if lsLen != 0 {
+ c.fromLine++
+ }
+
+ c.processHunk(i, chunk.Type())
+ c.fromLine += lsLen - 1
+ c.current.AddOp(chunk.Type(), ls...)
+ case Add:
+ if lsLen != 0 {
+ c.toLine++
+ }
+ c.processHunk(i, chunk.Type())
+ c.toLine += lsLen - 1
+ c.current.AddOp(chunk.Type(), ls...)
+ }
+
+ if i == len(c.chunks)-1 && c.current != nil {
+ c.hunks = append(c.hunks, c.current)
+ }
+ }
+
+ return c.hunks
+}
+
+func (c *hunksGenerator) processHunk(i int, op Operation) {
+ if c.current != nil {
+ return
+ }
+
+ var ctxPrefix string
+ linesBefore := len(c.beforeContext)
+ if linesBefore > c.ctxLines {
+ ctxPrefix = " " + c.beforeContext[linesBefore-c.ctxLines-1]
+ c.beforeContext = c.beforeContext[linesBefore-c.ctxLines:]
+ linesBefore = c.ctxLines
+ }
+
+ c.current = &hunk{ctxPrefix: ctxPrefix}
+ c.current.AddOp(Equal, c.beforeContext...)
+
+ switch op {
+ case Delete:
+ c.current.fromLine, c.current.toLine =
+ c.addLineNumbers(c.fromLine, c.toLine, linesBefore, i, Add)
+ case Add:
+ c.current.toLine, c.current.fromLine =
+ c.addLineNumbers(c.toLine, c.fromLine, linesBefore, i, Delete)
+ }
+
+ c.beforeContext = nil
+}
+
+// addLineNumbers obtains the line numbers for a new hunk
+func (c *hunksGenerator) addLineNumbers(la, lb int, linesBefore int, i int, op Operation) (cla, clb int) {
+ cla = la - linesBefore
+ // we need to search for a reference for the next diff
+ switch {
+ case linesBefore != 0 && c.ctxLines != 0:
+ clb = lb - c.ctxLines + 1
+ case c.ctxLines == 0:
+ clb = lb - c.ctxLines
+ case i != len(c.chunks)-1:
+ next := c.chunks[i+1]
+ if next.Type() == op || next.Type() == Equal {
+ // the next chunk will be included in this hunk
+ clb = lb + 1
+ }
+ }
+
+ return
+}
+
+func (c *hunksGenerator) processEqualsLines(ls []string, i int) {
+ if c.current == nil {
+ c.beforeContext = append(c.beforeContext, ls...)
+ return
+ }
+
+ c.afterContext = append(c.afterContext, ls...)
+ if len(c.afterContext) <= c.ctxLines*2 && i != len(c.chunks)-1 {
+ c.current.AddOp(Equal, c.afterContext...)
+ c.afterContext = nil
+ } else {
+ c.current.AddOp(Equal, c.afterContext[:c.ctxLines]...)
+ c.hunks = append(c.hunks, c.current)
+
+ c.current = nil
+ c.beforeContext = c.afterContext[c.ctxLines:]
+ c.afterContext = nil
+ }
+}
+
+func splitLines(s string) []string {
+ out := strings.Split(s, "\n")
+ if out[len(out)-1] == "" {
+ out = out[:len(out)-1]
+ }
+
+ return out
+}
+
+type hunk struct {
+ fromLine int
+ toLine int
+
+ fromCount int
+ toCount int
+
+ ctxPrefix string
+ ops []*op
+}
+
+func (c *hunk) WriteTo(buf *bytes.Buffer) {
+ buf.WriteString(chunkStart)
+
+ if c.fromCount == 1 {
+ fmt.Fprintf(buf, "%d", c.fromLine)
+ } else {
+ fmt.Fprintf(buf, chunkCount, c.fromLine, c.fromCount)
+ }
+
+ buf.WriteString(chunkMiddle)
+
+ if c.toCount == 1 {
+ fmt.Fprintf(buf, "%d", c.toLine)
+ } else {
+ fmt.Fprintf(buf, chunkCount, c.toLine, c.toCount)
+ }
+
+ fmt.Fprintf(buf, chunkEnd, c.ctxPrefix)
+
+ for _, d := range c.ops {
+ buf.WriteString(d.String())
+ }
+}
+
+func (c *hunk) AddOp(t Operation, s ...string) {
+ ls := len(s)
+ switch t {
+ case Add:
+ c.toCount += ls
+ case Delete:
+ c.fromCount += ls
+ case Equal:
+ c.toCount += ls
+ c.fromCount += ls
+ }
+
+ for _, l := range s {
+ c.ops = append(c.ops, &op{l, t})
+ }
+}
+
+type op struct {
+ text string
+ t Operation
+}
+
+func (o *op) String() string {
+ var prefix string
+ switch o.t {
+ case Add:
+ prefix = addLine
+ case Delete:
+ prefix = deleteLine
+ case Equal:
+ prefix = equalLine
+ }
+
+ return fmt.Sprintf(prefix, o.text)
+}
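
Before the tests, a standalone sketch (a throwaway main package, illustrative only) showing how the format strings above compose a hunk header. The values reproduce the "@@ -7,3 +6,2 @@ F" header found in the fixtures of the test file that follows; note that WriteTo prints only the line number when a count is exactly 1 (for example "@@ -1 +1 @@").

package main

import (
	"bytes"
	"fmt"
)

func main() {
	var buf bytes.Buffer
	fromLine, fromCount := 7, 3 // 3 lines starting at line 7 of the "from" file
	toLine, toCount := 6, 2     // 2 lines starting at line 6 of the "to" file
	ctxPrefix := " F"           // optional context printed after the closing "@@"

	buf.WriteString("@@ -")                         // chunkStart
	fmt.Fprintf(&buf, "%d,%d", fromLine, fromCount) // chunkCount
	buf.WriteString(" +")                           // chunkMiddle
	fmt.Fprintf(&buf, "%d,%d", toLine, toCount)     // chunkCount
	fmt.Fprintf(&buf, " @@%s\n", ctxPrefix)         // chunkEnd

	fmt.Print(buf.String()) // Output: @@ -7,3 +6,2 @@ F
}
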
diff --git a/plumbing/format/diff/unified_encoder_test.go b/plumbing/format/diff/unified_encoder_test.go
new file mode 100644
index 0000000..b832920
--- /dev/null
+++ b/plumbing/format/diff/unified_encoder_test.go
@@ -0,0 +1,829 @@
+package diff
+
+import (
+ "bytes"
+ "testing"
+
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/filemode"
+
+ . "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) { TestingT(t) }
+
+type UnifiedEncoderTestSuite struct{}
+
+var _ = Suite(&UnifiedEncoderTestSuite{})
+
+func (s *UnifiedEncoderTestSuite) TestBothFilesEmpty(c *C) {
+ buffer := bytes.NewBuffer(nil)
+ e := NewUnifiedEncoder(buffer, 1)
+ err := e.Encode(testPatch{filePatches: []testFilePatch{{}}})
+ c.Assert(err, Equals, ErrBothFilesEmpty)
+}
+
+func (s *UnifiedEncoderTestSuite) TestBinaryFile(c *C) {
+ buffer := bytes.NewBuffer(nil)
+ e := NewUnifiedEncoder(buffer, 1)
+ p := testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "binary",
+ seed: "something",
+ },
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "binary",
+ seed: "otherthing",
+ },
+ }},
+ }
+
+ err := e.Encode(p)
+ c.Assert(err, IsNil)
+
+ c.Assert(buffer.String(), Equals, `diff --git a/binary b/binary
+index a459bc245bdbc45e1bca99e7fe61731da5c48da4..6879395eacf3cc7e5634064ccb617ac7aa62be7d 100644
+Binary files a/binary and b/binary differ
+`)
+}
+
+func (s *UnifiedEncoderTestSuite) TestEncode(c *C) {
+ for _, f := range fixtures {
+ c.Log("executing: ", f.desc)
+
+ buffer := bytes.NewBuffer(nil)
+ e := NewUnifiedEncoder(buffer, f.context)
+
+ err := e.Encode(f.patch)
+ c.Assert(err, IsNil)
+
+ c.Assert(buffer.String(), Equals, f.diff)
+ }
+}
+
+var oneChunkPatch Patch = testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "onechunk.txt",
+ seed: "A\nB\nC\nD\nE\nF\nG\nH\nI\nJ\nK\nL\nM\nN\nÑ\nO\nP\nQ\nR\nS\nT\nU\nV\nW\nX\nY\nZ",
+ },
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "onechunk.txt",
+ seed: "B\nC\nD\nE\nF\nG\nI\nJ\nK\nL\nM\nN\nO\nP\nQ\nR\nS\nT\nV\nW\nX\nY\nZ",
+ },
+
+ chunks: []testChunk{{
+ content: "A\n",
+ op: Delete,
+ }, {
+ content: "B\nC\nD\nE\nF\nG",
+ op: Equal,
+ }, {
+ content: "H\n",
+ op: Delete,
+ }, {
+ content: "I\nJ\nK\nL\nM\nN\n",
+ op: Equal,
+ }, {
+ content: "Ñ\n",
+ op: Delete,
+ }, {
+ content: "O\nP\nQ\nR\nS\nT\n",
+ op: Equal,
+ }, {
+ content: "U\n",
+ op: Delete,
+ }, {
+ content: "V\nW\nX\nY\nZ",
+ op: Equal,
+ }},
+ }},
+}
+
+var oneChunkPatchInverted Patch = testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "onechunk.txt",
+ seed: "A\nB\nC\nD\nE\nF\nG\nH\nI\nJ\nK\nL\nM\nN\nÑ\nO\nP\nQ\nR\nS\nT\nU\nV\nW\nX\nY\nZ",
+ },
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "onechunk.txt",
+ seed: "B\nC\nD\nE\nF\nG\nI\nJ\nK\nL\nM\nN\nO\nP\nQ\nR\nS\nT\nV\nW\nX\nY\nZ",
+ },
+
+ chunks: []testChunk{{
+ content: "A\n",
+ op: Add,
+ }, {
+ content: "B\nC\nD\nE\nF\nG",
+ op: Equal,
+ }, {
+ content: "H\n",
+ op: Add,
+ }, {
+ content: "I\nJ\nK\nL\nM\nN\n",
+ op: Equal,
+ }, {
+ content: "Ñ\n",
+ op: Add,
+ }, {
+ content: "O\nP\nQ\nR\nS\nT\n",
+ op: Equal,
+ }, {
+ content: "U\n",
+ op: Add,
+ }, {
+ content: "V\nW\nX\nY\nZ",
+ op: Equal,
+ }},
+ }},
+}
+
+var fixtures []*fixture = []*fixture{{
+ patch: testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test",
+ },
+ to: &testFile{
+ mode: filemode.Executable,
+ path: "test.txt",
+ seed: "test",
+ },
+ chunks: nil,
+ }},
+ },
+ desc: "make executable",
+ context: 1,
+ diff: `diff --git a/test.txt b/test.txt
+old mode 100644
+new mode 100755
+`,
+}, {
+ patch: testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test",
+ },
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "test1.txt",
+ seed: "test",
+ },
+ chunks: nil,
+ }},
+ },
+ desc: "rename file",
+ context: 1,
+ diff: `diff --git a/test.txt b/test1.txt
+rename from test.txt
+rename to test1.txt
+`,
+}, {
+ patch: testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test",
+ },
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "test1.txt",
+ seed: "test1",
+ },
+ chunks: []testChunk{{
+ content: "test",
+ op: Delete,
+ }, {
+ content: "test1",
+ op: Add,
+ }},
+ }},
+ },
+ desc: "rename file with changes",
+ context: 1,
+ diff: `diff --git a/test.txt b/test1.txt
+rename from test.txt
+rename to test1.txt
+index 30d74d258442c7c65512eafab474568dd706c430..f079749c42ffdcc5f52ed2d3a6f15b09307e975e 100644
+--- a/test.txt
++++ b/test1.txt
+@@ -1 +1 @@
+-test
++test1
+`,
+}, {
+ patch: testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test",
+ },
+ to: &testFile{
+ mode: filemode.Executable,
+ path: "test1.txt",
+ seed: "test",
+ },
+ chunks: nil,
+ }},
+ },
+ desc: "rename with file mode change",
+ context: 1,
+ diff: `diff --git a/test.txt b/test1.txt
+old mode 100644
+new mode 100755
+rename from test.txt
+rename to test1.txt
+`,
+}, {
+ patch: testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test",
+ },
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test2",
+ },
+
+ chunks: []testChunk{{
+ content: "test",
+ op: Delete,
+ }, {
+ content: "test2",
+ op: Add,
+ }},
+ }},
+ },
+
+ desc: "one line change",
+ context: 1,
+ diff: `diff --git a/test.txt b/test.txt
+index 30d74d258442c7c65512eafab474568dd706c430..d606037cb232bfda7788a8322492312d55b2ae9d 100644
+--- a/test.txt
++++ b/test.txt
+@@ -1 +1 @@
+-test
++test2
+`,
+}, {
+ patch: testPatch{
+ message: "this is the message\n",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test",
+ },
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test2",
+ },
+
+ chunks: []testChunk{{
+ content: "test",
+ op: Delete,
+ }, {
+ content: "test2",
+ op: Add,
+ }},
+ }},
+ },
+
+ desc: "one line change with message",
+ context: 1,
+ diff: `this is the message
+diff --git a/test.txt b/test.txt
+index 30d74d258442c7c65512eafab474568dd706c430..d606037cb232bfda7788a8322492312d55b2ae9d 100644
+--- a/test.txt
++++ b/test.txt
+@@ -1 +1 @@
+-test
++test2
+`,
+}, {
+ patch: testPatch{
+ message: "this is the message",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test",
+ },
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "test.txt",
+ seed: "test2",
+ },
+
+ chunks: []testChunk{{
+ content: "test",
+ op: Delete,
+ }, {
+ content: "test2",
+ op: Add,
+ }},
+ }},
+ },
+
+ desc: "one line change with message and no end of line",
+ context: 1,
+ diff: `this is the message
+diff --git a/test.txt b/test.txt
+index 30d74d258442c7c65512eafab474568dd706c430..d606037cb232bfda7788a8322492312d55b2ae9d 100644
+--- a/test.txt
++++ b/test.txt
+@@ -1 +1 @@
+-test
++test2
+`,
+}, {
+ patch: testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: nil,
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "new.txt",
+ seed: "test\ntest2\test3",
+ },
+
+ chunks: []testChunk{{
+ content: "test\ntest2\ntest3",
+ op: Add,
+ }},
+ }},
+ },
+
+ desc: "new file",
+ context: 1,
+ diff: `diff --git a/new.txt b/new.txt
+new file mode 100644
+index 0000000000000000000000000000000000000000..65c8dd02a42273038658a22b1cb29c8d9457ca12
+--- /dev/null
++++ b/new.txt
+@@ -0,0 +1,3 @@
++test
++test2
++test3
+`,
+}, {
+ patch: testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "old.txt",
+ seed: "test",
+ },
+ to: nil,
+
+ chunks: []testChunk{{
+ content: "test",
+ op: Delete,
+ }},
+ }},
+ },
+
+ desc: "delete file",
+ context: 1,
+ diff: `diff --git a/old.txt b/old.txt
+deleted file mode 100644
+index 30d74d258442c7c65512eafab474568dd706c430..0000000000000000000000000000000000000000
+--- a/old.txt
++++ /dev/null
+@@ -1 +0,0 @@
+-test
+`,
+}, {
+ patch: oneChunkPatch,
+ desc: "modified deleting lines file with context to 1",
+ context: 1,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index ab5eed5d4a2c33aeef67e0188ee79bed666bde6f..0adddcde4fd38042c354518351820eb06c417c82 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1,2 +1 @@
+-A
+ B
+@@ -7,3 +6,2 @@ F
+ G
+-H
+ I
+@@ -14,3 +12,2 @@ M
+ N
+-Ñ
+ O
+@@ -21,3 +18,2 @@ S
+ T
+-U
+ V
+`,
+}, {
+ patch: oneChunkPatch,
+ desc: "modified deleting lines file with context to 2",
+ context: 2,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index ab5eed5d4a2c33aeef67e0188ee79bed666bde6f..0adddcde4fd38042c354518351820eb06c417c82 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1,3 +1,2 @@
+-A
+ B
+ C
+@@ -6,5 +5,4 @@ E
+ F
+ G
+-H
+ I
+ J
+@@ -13,5 +11,4 @@ L
+ M
+ N
+-Ñ
+ O
+ P
+@@ -20,5 +17,4 @@ R
+ S
+ T
+-U
+ V
+ W
+`,
+}, {
+ patch: oneChunkPatch,
+
+ desc: "modified deleting lines file with context to 3",
+ context: 3,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index ab5eed5d4a2c33aeef67e0188ee79bed666bde6f..0adddcde4fd38042c354518351820eb06c417c82 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1,25 +1,21 @@
+-A
+ B
+ C
+ D
+ E
+ F
+ G
+-H
+ I
+ J
+ K
+ L
+ M
+ N
+-Ñ
+ O
+ P
+ Q
+ R
+ S
+ T
+-U
+ V
+ W
+ X
+`,
+}, {
+ patch: oneChunkPatch,
+ desc: "modified deleting lines file with context to 4",
+ context: 4,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index ab5eed5d4a2c33aeef67e0188ee79bed666bde6f..0adddcde4fd38042c354518351820eb06c417c82 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1,26 +1,22 @@
+-A
+ B
+ C
+ D
+ E
+ F
+ G
+-H
+ I
+ J
+ K
+ L
+ M
+ N
+-Ñ
+ O
+ P
+ Q
+ R
+ S
+ T
+-U
+ V
+ W
+ X
+ Y
+`,
+}, {
+ patch: oneChunkPatch,
+ desc: "modified deleting lines file with context to 0",
+ context: 0,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index ab5eed5d4a2c33aeef67e0188ee79bed666bde6f..0adddcde4fd38042c354518351820eb06c417c82 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1 +0,0 @@
+-A
+@@ -8 +6,0 @@ G
+-H
+@@ -15 +12,0 @@ N
+-Ñ
+@@ -22 +18,0 @@ T
+-U
+`,
+}, {
+ patch: oneChunkPatchInverted,
+ desc: "modified adding lines file with context to 1",
+ context: 1,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index 0adddcde4fd38042c354518351820eb06c417c82..ab5eed5d4a2c33aeef67e0188ee79bed666bde6f 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1 +1,2 @@
++A
+ B
+@@ -6,2 +7,3 @@ F
+ G
++H
+ I
+@@ -12,2 +14,3 @@ M
+ N
++Ñ
+ O
+@@ -18,2 +21,3 @@ S
+ T
++U
+ V
+`,
+}, {
+ patch: oneChunkPatchInverted,
+ desc: "modified adding lines file with context to 2",
+ context: 2,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index 0adddcde4fd38042c354518351820eb06c417c82..ab5eed5d4a2c33aeef67e0188ee79bed666bde6f 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1,2 +1,3 @@
++A
+ B
+ C
+@@ -5,4 +6,5 @@ E
+ F
+ G
++H
+ I
+ J
+@@ -11,4 +13,5 @@ L
+ M
+ N
++Ñ
+ O
+ P
+@@ -17,4 +20,5 @@ R
+ S
+ T
++U
+ V
+ W
+`,
+}, {
+ patch: oneChunkPatchInverted,
+ desc: "modified adding lines file with context to 3",
+ context: 3,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index 0adddcde4fd38042c354518351820eb06c417c82..ab5eed5d4a2c33aeef67e0188ee79bed666bde6f 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1,21 +1,25 @@
++A
+ B
+ C
+ D
+ E
+ F
+ G
++H
+ I
+ J
+ K
+ L
+ M
+ N
++Ñ
+ O
+ P
+ Q
+ R
+ S
+ T
++U
+ V
+ W
+ X
+`,
+}, {
+ patch: oneChunkPatchInverted,
+ desc: "modified adding lines file with context to 4",
+ context: 4,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index 0adddcde4fd38042c354518351820eb06c417c82..ab5eed5d4a2c33aeef67e0188ee79bed666bde6f 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -1,22 +1,26 @@
++A
+ B
+ C
+ D
+ E
+ F
+ G
++H
+ I
+ J
+ K
+ L
+ M
+ N
++Ñ
+ O
+ P
+ Q
+ R
+ S
+ T
++U
+ V
+ W
+ X
+ Y
+`,
+}, {
+ patch: oneChunkPatchInverted,
+ desc: "modified adding lines file with context to 0",
+ context: 0,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index 0adddcde4fd38042c354518351820eb06c417c82..ab5eed5d4a2c33aeef67e0188ee79bed666bde6f 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -0,0 +1 @@
++A
+@@ -6,0 +8 @@ G
++H
+@@ -12,0 +15 @@ N
++Ñ
+@@ -18,0 +22 @@ T
++U
+`,
+}, {
+ patch: testPatch{
+ message: "",
+ filePatches: []testFilePatch{{
+ from: &testFile{
+ mode: filemode.Regular,
+ path: "onechunk.txt",
+ seed: "B\nC\nD\nE\nF\nG\nI\nJ\nK\nL\nM\nN\nO\nP\nQ\nR\nS\nT\nV\nW\nX\nY\nZ",
+ },
+ to: &testFile{
+ mode: filemode.Regular,
+ path: "onechunk.txt",
+ seed: "B\nC\nD\nE\nF\nG\nI\nJ\nK\nL\nM\nN\nO\nP\nQ\nR\nS\nT\nV\nW\nX\nY\n",
+ },
+
+ chunks: []testChunk{{
+ content: "B\nC\nD\nE\nF\nG\nI\nJ\nK\nL\nM\nN\nO\nP\nQ\nR\nS\nT\nV\nW\nX\nY\n",
+ op: Equal,
+ }, {
+ content: "Z",
+ op: Delete,
+ }},
+ }},
+ },
+ desc: "remove last letter",
+ context: 0,
+ diff: `diff --git a/onechunk.txt b/onechunk.txt
+index 0adddcde4fd38042c354518351820eb06c417c82..553ae669c7a9303cf848fcc749a2569228ac5309 100644
+--- a/onechunk.txt
++++ b/onechunk.txt
+@@ -23 +22,0 @@ Y
+-Z
+`,
+}}
+
+type testPatch struct {
+ message string
+ filePatches []testFilePatch
+}
+
+func (t testPatch) FilePatches() []FilePatch {
+ var result []FilePatch
+ for _, f := range t.filePatches {
+ result = append(result, f)
+ }
+
+ return result
+}
+
+func (t testPatch) Message() string {
+ return t.message
+}
+
+type testFilePatch struct {
+ from, to *testFile
+ chunks []testChunk
+}
+
+func (t testFilePatch) IsBinary() bool {
+ return len(t.chunks) == 0
+}
+func (t testFilePatch) Files() (File, File) {
+ // return untyped nil interfaces rather than typed nil *testFile values
+ switch {
+ case t.from == nil && t.to == nil:
+ return nil, nil
+ case t.from == nil:
+ return nil, t.to
+ case t.to == nil:
+ return t.from, nil
+ }
+
+ return t.from, t.to
+}
+
+func (t testFilePatch) Chunks() []Chunk {
+ var result []Chunk
+ for _, c := range t.chunks {
+ result = append(result, c)
+ }
+ return result
+}
+
+type testFile struct {
+ path string
+ mode filemode.FileMode
+ seed string
+}
+
+func (t testFile) Hash() plumbing.Hash {
+ return plumbing.ComputeHash(plumbing.BlobObject, []byte(t.seed))
+}
+
+func (t testFile) Mode() filemode.FileMode {
+ return t.mode
+}
+
+func (t testFile) Path() string {
+ return t.path
+}
+
+type testChunk struct {
+ content string
+ op Operation
+}
+
+func (t testChunk) Content() string {
+ return t.content
+}
+
+func (t testChunk) Type() Operation {
+ return t.op
+}
+
+type fixture struct {
+ desc string
+ context int
+ diff string
+ patch Patch
+}