author    Paulo Gomes <pjbgf@linux.com>  2023-08-05 10:20:38 +0100
committer GitHub <noreply@github.com>  2023-08-05 10:20:38 +0100
commit    e6f68d2e4cd1bc4447126816c7c27e1fc2098e30 (patch)
tree      15c5e333b93641f9eadcb4bf4b34c338135f7a23
parent    5882d60fb7ccd4cfc0fe69286aa96e198c9d1eb0 (diff)
parent    4ec6b3f4fa9cdfe8f10d0953ac7d398d01a90f17 (diff)
download  go-git-e6f68d2e4cd1bc4447126816c7c27e1fc2098e30.tar.gz
Merge branch 'master' into jc/commit-ammend
-rw-r--r--  .github/workflows/codeql.yml | 44
-rw-r--r--  .github/workflows/git.yml | 19
-rw-r--r--  .github/workflows/test.yml | 23
-rw-r--r--  .gitignore | 2
-rw-r--r--  COMPATIBILITY.md | 344
-rw-r--r--  EXTENDING.md | 78
-rw-r--r--  Makefile | 10
-rw-r--r--  SECURITY.md | 38
-rw-r--r--  _examples/README.md | 5
-rw-r--r--  _examples/azure_devops/main.go | 56
-rw-r--r--  _examples/blame/main.go | 48
-rw-r--r--  _examples/clone/auth/ssh/main.go | 3
-rw-r--r--  _examples/commit/main.go | 3
-rw-r--r--  _examples/common_test.go | 3
-rw-r--r--  _examples/ls-remote/main.go | 14
-rw-r--r--  _examples/sha256/main.go | 65
-rw-r--r--  _examples/tag-create-push/main.go | 3
-rw-r--r--  blame.go | 612
-rw-r--r--  blame_test.go | 134
-rw-r--r--  common_test.go | 79
-rw-r--r--  config/config.go | 87
-rw-r--r--  config/refspec.go | 2
-rw-r--r--  example_test.go | 7
-rw-r--r--  go.mod | 53
-rw-r--r--  go.sum | 149
-rw-r--r--  internal/path_util/path_util.go | 29
-rw-r--r--  internal/revision/parser.go | 4
-rw-r--r--  internal/revision/parser_test.go | 7
-rw-r--r--  internal/url/url.go | 6
-rwxr-xr-x  internal/url/url_test.go | 107
-rw-r--r--  options.go | 71
-rw-r--r--  plumbing/format/commitgraph/encoder.go | 9
-rw-r--r--  plumbing/format/config/encoder.go | 17
-rw-r--r--  plumbing/format/config/fixtures_test.go | 35
-rw-r--r--  plumbing/format/config/format.go | 53
-rw-r--r--  plumbing/format/config/section.go | 2
-rw-r--r--  plumbing/format/diff/patch.go | 10
-rw-r--r--  plumbing/format/gitattributes/attributes.go | 3
-rw-r--r--  plumbing/format/gitattributes/pattern.go | 5
-rw-r--r--  plumbing/format/gitattributes/pattern_test.go | 6
-rw-r--r--  plumbing/format/gitignore/dir.go | 8
-rw-r--r--  plumbing/format/gitignore/dir_test.go | 119
-rw-r--r--  plumbing/format/gitignore/pattern.go | 2
-rw-r--r--  plumbing/format/idxfile/decoder.go | 3
-rw-r--r--  plumbing/format/idxfile/decoder_test.go | 3
-rw-r--r--  plumbing/format/idxfile/encoder.go | 9
-rw-r--r--  plumbing/format/idxfile/encoder_test.go | 4
-rw-r--r--  plumbing/format/idxfile/idxfile.go | 5
-rw-r--r--  plumbing/format/idxfile/writer.go | 29
-rw-r--r--  plumbing/format/idxfile/writer_test.go | 6
-rw-r--r--  plumbing/format/index/decoder.go | 9
-rw-r--r--  plumbing/format/index/encoder.go | 5
-rw-r--r--  plumbing/format/objfile/reader.go | 17
-rw-r--r--  plumbing/format/objfile/reader_test.go | 3
-rw-r--r--  plumbing/format/objfile/writer.go | 7
-rw-r--r--  plumbing/format/packfile/common.go | 18
-rw-r--r--  plumbing/format/packfile/delta_test.go | 10
-rw-r--r--  plumbing/format/packfile/diff_delta.go | 21
-rw-r--r--  plumbing/format/packfile/encoder.go | 10
-rw-r--r--  plumbing/format/packfile/encoder_test.go | 14
-rw-r--r--  plumbing/format/packfile/packfile.go | 34
-rw-r--r--  plumbing/format/packfile/parser.go | 42
-rw-r--r--  plumbing/format/packfile/parser_test.go | 54
-rw-r--r--  plumbing/format/packfile/patch_delta.go | 17
-rw-r--r--  plumbing/format/packfile/scanner.go | 57
-rw-r--r--  plumbing/format/packfile/scanner_test.go | 3
-rw-r--r--  plumbing/hash.go | 17
-rw-r--r--  plumbing/hash/hash.go | 60
-rw-r--r--  plumbing/hash/hash_sha1.go | 15
-rw-r--r--  plumbing/hash/hash_sha256.go | 15
-rw-r--r--  plumbing/hash/hash_test.go | 103
-rw-r--r--  plumbing/memory.go | 4
-rw-r--r--  plumbing/memory_test.go | 5
-rw-r--r--  plumbing/object/blob_test.go | 7
-rw-r--r--  plumbing/object/change.go | 2
-rw-r--r--  plumbing/object/commit.go | 18
-rw-r--r--  plumbing/object/commit_test.go | 75
-rw-r--r--  plumbing/object/common.go | 12
-rw-r--r--  plumbing/object/object_test.go | 3
-rw-r--r--  plumbing/object/rename.go | 13
-rw-r--r--  plumbing/object/signature.go | 101
-rw-r--r--  plumbing/object/signature_test.go | 180
-rw-r--r--  plumbing/object/tag.go | 45
-rw-r--r--  plumbing/object/tag_test.go | 24
-rw-r--r--  plumbing/object/tree.go | 8
-rw-r--r--  plumbing/protocol/packp/advrefs.go | 14
-rw-r--r--  plumbing/protocol/packp/advrefs_decode.go | 1
-rw-r--r--  plumbing/protocol/packp/advrefs_decode_test.go | 10
-rw-r--r--  plumbing/protocol/packp/capability/capability.go | 15
-rw-r--r--  plumbing/protocol/packp/capability/capability_test.go | 22
-rw-r--r--  plumbing/protocol/packp/capability/list.go | 4
-rw-r--r--  plumbing/protocol/packp/capability/list_test.go | 11
-rw-r--r--  plumbing/protocol/packp/sideband/demux_test.go | 3
-rw-r--r--  plumbing/protocol/packp/srvresp.go | 28
-rw-r--r--  plumbing/protocol/packp/srvresp_test.go | 17
-rw-r--r--  plumbing/protocol/packp/ulreq.go | 2
-rw-r--r--  plumbing/protocol/packp/ulreq_decode_test.go | 5
-rw-r--r--  plumbing/protocol/packp/ulreq_test.go | 2
-rw-r--r--  plumbing/protocol/packp/updreq.go | 2
-rw-r--r--  plumbing/protocol/packp/updreq_decode.go | 3
-rw-r--r--  plumbing/protocol/packp/updreq_decode_test.go | 15
-rw-r--r--  plumbing/protocol/packp/updreq_encode_test.go | 5
-rw-r--r--  plumbing/protocol/packp/updreq_test.go | 4
-rw-r--r--  plumbing/protocol/packp/uppackreq.go | 6
-rw-r--r--  plumbing/protocol/packp/uppackreq_test.go | 9
-rw-r--r--  plumbing/protocol/packp/uppackresp.go | 2
-rw-r--r--  plumbing/protocol/packp/uppackresp_test.go | 26
-rw-r--r--  plumbing/reference.go | 35
-rw-r--r--  plumbing/reference_test.go | 24
-rw-r--r--  plumbing/transport/client/client.go | 32
-rw-r--r--  plumbing/transport/common.go | 41
-rw-r--r--  plumbing/transport/common_test.go | 32
-rw-r--r--  plumbing/transport/file/common_test.go | 3
-rw-r--r--  plumbing/transport/git/common.go | 9
-rw-r--r--  plumbing/transport/git/common_test.go | 3
-rw-r--r--  plumbing/transport/http/common.go | 178
-rw-r--r--  plumbing/transport/http/common_test.go | 57
-rw-r--r--  plumbing/transport/http/internal/test/proxy_test.go | 72
-rw-r--r--  plumbing/transport/http/internal/test/test_utils.go | 43
-rw-r--r--  plumbing/transport/http/proxy_test.go | 119
-rw-r--r--  plumbing/transport/http/receive_pack.go | 2
-rw-r--r--  plumbing/transport/http/testdata/certs/server.crt | 22
-rw-r--r--  plumbing/transport/http/testdata/certs/server.key | 28
-rw-r--r--  plumbing/transport/http/transport.go | 40
-rw-r--r--  plumbing/transport/http/upload_pack.go | 2
-rw-r--r--  plumbing/transport/http/upload_pack_test.go | 4
-rw-r--r--  plumbing/transport/internal/common/common.go | 7
-rw-r--r--  plumbing/transport/internal/common/common_test.go | 14
-rw-r--r--  plumbing/transport/server/server.go | 4
-rw-r--r--  plumbing/transport/ssh/auth_method.go | 21
-rw-r--r--  plumbing/transport/ssh/common.go | 49
-rw-r--r--  plumbing/transport/ssh/common_test.go | 79
-rw-r--r--  plumbing/transport/ssh/internal/test/proxy_test.go | 112
-rw-r--r--  plumbing/transport/ssh/internal/test/test_utils.go | 83
-rw-r--r--  plumbing/transport/ssh/proxy_test.go | 71
-rw-r--r--  plumbing/transport/ssh/upload_pack_test.go | 10
-rw-r--r--  plumbing/transport/test/receive_pack.go | 6
-rw-r--r--  plumbing/transport/test/upload_pack.go | 6
-rw-r--r--  references.go | 264
-rw-r--r--  references_test.go | 401
-rw-r--r--  remote.go | 180
-rw-r--r--  remote_test.go | 232
-rw-r--r--  repository.go | 201
-rw-r--r--  repository_test.go | 229
-rw-r--r--  storage/filesystem/dotgit/dotgit.go | 104
-rw-r--r--  storage/filesystem/dotgit/dotgit_test.go | 100
-rw-r--r--  storage/filesystem/dotgit/writers.go | 5
-rw-r--r--  storage/filesystem/object.go | 24
-rw-r--r--  storage/filesystem/object_test.go | 18
-rw-r--r--  storage/filesystem/shallow.go | 2
-rw-r--r--  storage/test/storage_suite.go | 5
-rw-r--r--  submodule.go | 17
-rw-r--r--  submodule_test.go | 71
-rw-r--r--  utils/ioutil/common_test.go | 8
-rw-r--r--  utils/sync/bufio.go | 29
-rw-r--r--  utils/sync/bufio_test.go | 26
-rw-r--r--  utils/sync/bytes.go | 51
-rw-r--r--  utils/sync/bytes_test.go | 49
-rw-r--r--  utils/sync/zlib.go | 74
-rw-r--r--  utils/sync/zlib_test.go | 74
-rw-r--r--  worktree.go | 89
-rw-r--r--  worktree_bsd.go | 2
-rw-r--r--  worktree_commit.go | 21
-rw-r--r--  worktree_commit_test.go | 26
-rw-r--r--  worktree_linux.go | 2
-rw-r--r--  worktree_status.go | 33
-rw-r--r--  worktree_test.go | 359
-rw-r--r--  worktree_unix_other.go | 2
168 files changed, 5597 insertions, 1976 deletions
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 0000000..fbb867c
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,44 @@
+name: "CodeQL"
+
+on:
+ push:
+ branches: [ "master" ]
+ pull_request:
+ branches: [ "master" ]
+ schedule:
+ - cron: '00 5 * * 1'
+
+jobs:
+ analyze:
+ name: Analyze
+ runs-on: ubuntu-latest
+ permissions:
+ actions: read
+ contents: read
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: [ 'go' ]
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@29b1f65c5e92e24fe6b6647da1eaabe529cec70f # v2.3.3
+ with:
+ languages: ${{ matrix.language }}
+ # xref: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+ # xref: https://codeql.github.com/codeql-query-help/go/
+ queries: security-and-quality
+
+ - name: Manual Build
+ run: go build ./...
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@29b1f65c5e92e24fe6b6647da1eaabe529cec70f # v2.3.3
+ with:
+ category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/git.yml b/.github/workflows/git.yml
index bbccaa3..60cfa12 100644
--- a/.github/workflows/git.yml
+++ b/.github/workflows/git.yml
@@ -1,5 +1,8 @@
on: [push, pull_request]
name: Git Compatibility
+permissions:
+ contents: read
+
jobs:
test:
strategy:
@@ -14,15 +17,15 @@ jobs:
steps:
- name: Install Go
- uses: actions/setup-go@v1
+ uses: actions/setup-go@v3
with:
- go-version: 1.14.x
+ go-version: 1.20.x
- name: Checkout code
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
- name: Install build dependencies
- run: sudo apt-get install gettext
+ run: sudo apt-get update && sudo apt-get install gettext libcurl4-openssl-dev
- name: Git Build
run: make build-git
@@ -38,3 +41,11 @@ jobs:
- name: Test
run: make test-coverage
+
+ - name: Test SHA256
+ run: make test-sha256
+
+ - name: Build go-git with CGO disabled
+ run: go build ./...
+ env:
+ CGO_ENABLED: 0
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1644dcf..ce5872d 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,22 +1,25 @@
on: [push, pull_request]
name: Test
+permissions:
+ contents: read
+
jobs:
version-matrix:
strategy:
fail-fast: false
matrix:
- go-version: [1.15.x, 1.16.x]
+ go-version: [1.19.x, 1.20.x]
platform: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.platform }}
steps:
- name: Install Go
- uses: actions/setup-go@v1
+ uses: actions/setup-go@v3
with:
go-version: ${{ matrix.go-version }}
- name: Checkout code
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
- name: Configure known hosts
if: matrix.platform != 'ubuntu-latest'
@@ -31,17 +34,3 @@ jobs:
- name: Test
run: make test-coverage
-
- - name: Convert coverage to lcov
- if: matrix.platform == 'ubuntu-latest' && matrix.go-version == '1.14.x'
- uses: jandelgado/gcov2lcov-action@v1.0.0
- with:
- infile: coverage.out
- outfile: coverage.lcov
-
- - name: Coveralls
- if: matrix.platform == 'ubuntu-latest' && matrix.go-version == '1.14.x'
- uses: coverallsapp/github-action@master
- with:
- github-token: ${{ secrets.GITHUB_TOKEN }}
- path-to-lcov: coverage.lcov
diff --git a/.gitignore b/.gitignore
index 038dd9f..361133d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,5 @@ coverage.out
*~
coverage.txt
profile.out
+.tmp/
+.git-dist/
diff --git a/COMPATIBILITY.md b/COMPATIBILITY.md
index 2a72b50..afd4f03 100644
--- a/COMPATIBILITY.md
+++ b/COMPATIBILITY.md
@@ -1,111 +1,233 @@
-Supported Capabilities
-======================
-
-Here is a non-comprehensive table of git commands and features whose equivalent
-is supported by go-git.
-
-| Feature | Status | Notes |
-|---------------------------------------|--------|-------|
-| **config** |
-| config | ✔ | Reading and modifying per-repository configuration (`.git/config`) is supported. Global configuration (`$HOME/.gitconfig`) is not. |
-| **getting and creating repositories** |
-| init | ✔ | Plain init and `--bare` are supported. Flags `--template`, `--separate-git-dir` and `--shared` are not. |
-| clone | ✔ | Plain clone and equivalents to `--progress`, `--single-branch`, `--depth`, `--origin`, `--recurse-submodules` are supported. Others are not. |
-| **basic snapshotting** |
-| add | ✔ | Plain add is supported. Any other flags aren't supported |
-| status | ✔ |
-| commit | ✔ |
-| reset | ✔ |
-| rm | ✔ |
-| mv | ✔ |
-| **branching and merging** |
-| branch | ✔ |
-| checkout | ✔ | Basic usages of checkout are supported. |
-| merge | ✖ |
-| mergetool | ✖ |
-| stash | ✖ |
-| tag | ✔ |
-| **sharing and updating projects** |
-| fetch | ✔ |
-| pull | ✔ | Only supports merges where the merge can be resolved as a fast-forward. |
-| push | ✔ |
-| remote | ✔ |
-| submodule | ✔ |
-| **inspection and comparison** |
-| show | ✔ |
-| log | ✔ |
-| shortlog | (see log) |
-| describe | |
-| **patching** |
-| apply | ✖ |
-| cherry-pick | ✖ |
-| diff | ✔ | Patch object with UnifiedDiff output representation |
-| rebase | ✖ |
-| revert | ✖ |
-| **debugging** |
-| bisect | ✖ |
-| blame | ✔ |
-| grep | ✔ |
-| **email** ||
-| am | ✖ |
-| apply | ✖ |
-| format-patch | ✖ |
-| send-email | ✖ |
-| request-pull | ✖ |
-| **external systems** |
-| svn | ✖ |
-| fast-import | ✖ |
-| **administration** |
-| clean | ✔ |
-| gc | ✖ |
-| fsck | ✖ |
-| reflog | ✖ |
-| filter-branch | ✖ |
-| instaweb | ✖ |
-| archive | ✖ |
-| bundle | ✖ |
-| prune | ✖ |
-| repack | ✖ |
-| **server admin** |
-| daemon | |
-| update-server-info | |
-| **advanced** |
-| notes | ✖ |
-| replace | ✖ |
-| worktree | ✖ |
-| annotate | (see blame) |
-| **gpg** |
-| git-verify-commit | ✔ |
-| git-verify-tag | ✔ |
-| **plumbing commands** |
-| cat-file | ✔ |
-| check-ignore | |
-| commit-tree | |
-| count-objects | |
-| diff-index | |
-| for-each-ref | ✔ |
-| hash-object | ✔ |
-| ls-files | ✔ |
-| merge-base | ✔ | Calculates the merge-base only between two commits, and supports `--independent` and `--is-ancestor` modifiers; Does not support `--fork-point` nor `--octopus` modifiers. |
-| read-tree | |
-| rev-list | ✔ |
-| rev-parse | |
-| show-ref | ✔ |
-| symbolic-ref | ✔ |
-| update-index | |
-| update-ref | |
-| verify-pack | |
-| write-tree | |
-| **protocols** |
-| http(s):// (dumb) | ✖ |
-| http(s):// (smart) | ✔ |
-| git:// | ✔ |
-| ssh:// | ✔ |
-| file:// | partial | Warning: this is not pure Golang. This shells out to the `git` binary. |
-| custom | ✔ |
-| **other features** |
-| gitignore | ✔ |
-| gitattributes | ✖ |
-| index version | |
-| packfile version | |
-| push-certs | ✖ |
+# Supported Features
+
+Here is a non-comprehensive overview of git commands and features and their
+compatibility status with go-git.
+
+## Getting and creating repositories
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `init` | | ✅ | | |
+| `init` | `--bare` | ✅ | | |
+| `init` | `--template` <br/> `--separate-git-dir` <br/> `--shared` | ❌ | | |
+| `clone` | | ✅ | | - [PlainClone](_examples/clone/main.go) |
+| `clone` | Authentication: <br/> - none <br/> - access token <br/> - username + password <br/> - ssh | ✅ | | - [clone ssh](_examples/clone/auth/ssh/main.go) <br/> - [clone access token](_examples/clone/auth/basic/access_token/main.go) <br/> - [clone user + password](_examples/clone/auth/basic/username_password/main.go) |
+| `clone` | `--progress` <br/> `--single-branch` <br/> `--depth` <br/> `--origin` <br/> `--recurse-submodules` | ✅ | | - [recurse submodules](_examples/clone/main.go) <br/> - [progress](_examples/progress/main.go) |
+
+## Basic snapshotting
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `add` | | ✅ | Plain add is supported. Other flags are not supported. | |
+| `status` | | ✅ | | |
+| `commit` | | ✅ | | - [commit](_examples/commit/main.go) |
+| `reset` | | ✅ | | |
+| `rm` | | ✅ | | |
+| `mv` | | ✅ | | |
+
+## Branching and merging
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `branch` | | ✅ | | - [branch](_examples/branch/main.go) |
+| `checkout` | | ✅ | Basic usages of checkout are supported. | - [checkout](_examples/checkout/main.go) |
+| `merge` | | ❌ | | |
+| `mergetool` | | ❌ | | |
+| `stash` | | ❌ | | |
+| `tag` | | ✅ | | - [tag](_examples/tag/main.go) <br/> - [tag create and push](_examples/tag-create-push/main.go) |
+
+## Sharing and updating projects
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `fetch` | | ✅ | | |
+| `pull` | | ✅ | Only supports merges where the merge can be resolved as a fast-forward. | - [pull](_examples/pull/main.go) |
+| `push` | | ✅ | | - [push](_examples/push/main.go) |
+| `remote` | | ✅ | | - [remotes](_examples/remotes/main.go) |
+| `submodule` | | ✅ | | - [submodule](_examples/submodule/main.go) |
+| `submodule` | deinit | ❌ | | |
+
+## Inspection and comparison
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `show` | | ✅ | | |
+| `log` | | ✅ | | - [log](_examples/log/main.go) |
+| `shortlog` | | (see log) | | |
+| `describe` | | ❌ | | |
+
+## Patching
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `apply` | | ❌ | | |
+| `cherry-pick` | | ❌ | | |
+| `diff` | | ✅ | Patch object with UnifiedDiff output representation. | |
+| `rebase` | | ❌ | | |
+| `revert` | | ❌ | | |
+
+## Debugging
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `bisect` | | ❌ | | |
+| `blame` | | ✅ | | - [blame](_examples/blame/main.go) |
+| `grep` | | ✅ | | |
+
+## Email
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `am` | | ❌ | | |
+| `apply` | | ❌ | | |
+| `format-patch` | | ❌ | | |
+| `send-email` | | ❌ | | |
+| `request-pull` | | ❌ | | |
+
+## External systems
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `svn` | | ❌ | | |
+| `fast-import` | | ❌ | | |
+| `lfs` | | ❌ | | |
+
+## Administration
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `clean` | | ✅ | | |
+| `gc` | | ❌ | | |
+| `fsck` | | ❌ | | |
+| `reflog` | | ❌ | | |
+| `filter-branch` | | ❌ | | |
+| `instaweb` | | ❌ | | |
+| `archive` | | ❌ | | |
+| `bundle` | | ❌ | | |
+| `prune` | | ❌ | | |
+| `repack` | | ❌ | | |
+
+## Server admin
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `daemon` | | ❌ | | |
+| `update-server-info` | | ❌ | | |
+
+## Advanced
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `notes` | | ❌ | | |
+| `replace` | | ❌ | | |
+| `worktree` | | ❌ | | |
+| `annotate` | | (see blame) | | |
+
+## GPG
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `git-verify-commit` | | ✅ | | |
+| `git-verify-tag` | | ✅ | | |
+
+## Plumbing commands
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `cat-file` | | ✅ | | |
+| `check-ignore` | | ❌ | | |
+| `commit-tree` | | ❌ | | |
+| `count-objects` | | ❌ | | |
+| `diff-index` | | ❌ | | |
+| `for-each-ref` | | ✅ | | |
+| `hash-object` | | ✅ | | |
+| `ls-files` | | ✅ | | |
+| `ls-remote` | | ✅ | | - [ls-remote](_examples/ls-remote/main.go) |
+| `merge-base` | `--independent` <br/> `--is-ancestor` | ⚠️ (partial) | Calculates the merge-base only between two commits. | - [merge-base](_examples/merge_base/main.go) |
+| `merge-base` | `--fork-point` <br/> `--octopus` | ❌ | | |
+| `read-tree` | | ❌ | | |
+| `rev-list` | | ✅ | | |
+| `rev-parse` | | ❌ | | |
+| `show-ref` | | ✅ | | |
+| `symbolic-ref` | | ✅ | | |
+| `update-index` | | ❌ | | |
+| `update-ref` | | ❌ | | |
+| `verify-pack` | | ❌ | | |
+| `write-tree` | | ❌ | | |
+
+## Indexes and Git Protocols
+
+| Feature | Version | Status | Notes |
+|---|---|---|---|
+| index | [v1](https://github.com/git/git/blob/master/Documentation/gitformat-index.txt) | ❌ | |
+| index | [v2](https://github.com/git/git/blob/master/Documentation/gitformat-index.txt) | ✅ | |
+| index | [v3](https://github.com/git/git/blob/master/Documentation/gitformat-index.txt) | ❌ | |
+| pack-protocol | [v1](https://github.com/git/git/blob/master/Documentation/gitprotocol-pack.txt) | ✅ | |
+| pack-protocol | [v2](https://github.com/git/git/blob/master/Documentation/gitprotocol-v2.txt) | ❌ | |
+| multi-pack-index | [v1](https://github.com/git/git/blob/master/Documentation/gitformat-pack.txt) | ❌ | |
+| pack-*.rev files | [v1](https://github.com/git/git/blob/master/Documentation/gitformat-pack.txt) | ❌ | |
+| pack-*.mtimes files | [v1](https://github.com/git/git/blob/master/Documentation/gitformat-pack.txt) | ❌ | |
+| cruft packs | | ❌ | |
+
+## Capabilities
+
+| Feature | Status | Notes |
+|---|---|---|
+| `multi_ack` | ❌ | |
+| `multi_ack_detailed` | ❌ | |
+| `no-done` | ❌ | |
+| `thin-pack` | ❌ | |
+| `side-band` | ⚠️ (partial) | |
+| `side-band-64k` | ⚠️ (partial) | |
+| `ofs-delta` | ✅ | |
+| `agent` | ✅ | |
+| `object-format` | ❌ | |
+| `symref` | ✅ | |
+| `shallow` | ✅ | |
+| `deepen-since` | ✅ | |
+| `deepen-not` | ❌ | |
+| `deepen-relative` | ❌ | |
+| `no-progress` | ✅ | |
+| `include-tag` | ✅ | |
+| `report-status` | ✅ | |
+| `report-status-v2` | ❌ | |
+| `delete-refs` | ✅ | |
+| `quiet` | ❌ | |
+| `atomic` | ✅ | |
+| `push-options` | ✅ | |
+| `allow-tip-sha1-in-want` | ✅ | |
+| `allow-reachable-sha1-in-want` | ❌ | |
+| `push-cert=<nonce>` | ❌ | |
+| `filter` | ❌ | |
+| `session-id=<session id>` | ❌ | |
+
+## Transport Schemes
+
+| Scheme | Status | Notes | Examples |
+|---|---|---|---|
+| `http(s)://` (dumb) | ❌ | | |
+| `http(s)://` (smart) | ✅ | | |
+| `git://` | ✅ | | |
+| `ssh://` | ✅ | | |
+| `file://` | ⚠️ (partial) | Warning: this is not pure Golang. This shells out to the `git` binary. | |
+| Custom | ✅ | All existing schemes can be replaced by custom implementations. | - [custom_http](_examples/custom_http/main.go) |
+
+## SHA256
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `init` | | ✅ | Requires building with tag sha256. | - [init](_examples/sha256/main.go) |
+| `commit` | | ✅ | Requires building with tag sha256. | - [commit](_examples/sha256/main.go) |
+| `pull` | | ❌ | | |
+| `fetch` | | ❌ | | |
+| `push` | | ❌ | | |
+
+## Other features
+
+| Feature | Sub-feature | Status | Notes | Examples |
+|---|---|---|---|---|
+| `config` | `--local` | ✅ | Read and write per-repository (`.git/config`). | |
+| `config` | `--global` <br/> `--system` | ✅ | Read-only. | |
+| `gitignore` | | ✅ | | |
+| `gitattributes` | | ✅ | | |
+| `git-worktree` | | ❌ | Multiple worktrees are not supported. | |
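
As a side note on the `config` rows above (`--global` and `--system` are listed as read-only), here is a minimal sketch of reading the global scope through go-git. It assumes the `config.LoadConfig` helper and `GlobalScope` constant from the `config` package; the printed fields are only for illustration.

```go
package main

import (
	"fmt"

	"github.com/go-git/go-git/v5/config"
)

func main() {
	// Load the global scope (typically ~/.gitconfig) into a Config value.
	cfg, err := config.LoadConfig(config.GlobalScope)
	if err != nil {
		panic(err)
	}

	// Global configuration is read-only in go-git; writes still go through
	// the per-repository (local) configuration.
	fmt.Println(cfg.User.Name, cfg.User.Email)
}
```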
diff --git a/EXTENDING.md b/EXTENDING.md
new file mode 100644
index 0000000..a2778e3
--- /dev/null
+++ b/EXTENDING.md
@@ -0,0 +1,78 @@
+# Extending go-git
+
+`go-git` was built in a highly extensible manner, which enables some of its functionality to be changed or extended without modifying its codebase. Here are the key extensibility features:
+
+## Dot Git Storers
+
+Dot git storers are the components responsible for storing the Git internal files, including objects and references.
+
+The built-in storer implementations include [memory](storage/memory) and [filesystem](storage/filesystem). The `memory` storer stores all the data in memory, and its use looks like this:
+
+```go
+ r, err := git.Init(memory.NewStorage(), nil)
+```
+
+The `filesystem` storer stores the data in the OS filesystem, and can be used as follows:
+
+```go
+ r, err := git.Init(filesystem.NewStorage(osfs.New("/tmp/foo"), cache.NewObjectLRUDefault()), nil)
+```
+
+New implementations can be created by implementing the [storage.Storer interface](storage/storer.go#L16).
+
+## Filesystem
+
+Git repository worktrees are managed using a filesystem abstraction based on [go-billy](https://github.com/go-git/go-billy). Git operations take place against the chosen filesystem implementation. Initialising a repository in memory can be done as follows:
+
+```go
+ fs := memfs.New()
+ r, err := git.Init(memory.NewStorage(), fs)
+```
+
+The same operation can be done against the OS filesystem:
+
+```go
+ fs := osfs.New("/tmp/foo")
+ r, err := git.Init(memory.NewStorage(), fs)
+```
+
+New filesystems (e.g. cloud based storage) could be created by implementing `go-billy`'s [Filesystem interface](https://github.com/go-git/go-billy/blob/326c59f064021b821a55371d57794fbfb86d4cb3/fs.go#L52).
+
+## Transport Schemes
+
+Git supports various transport schemes, including `http`, `https`, `ssh`, `git`, `file`. `go-git` defines the [transport.Transport interface](plumbing/transport/common.go#L48) to represent them.
+
+The built-in implementations can be replaced by calling `client.InstallProtocol`.
+
+An example of replacing the built-in `https` implementation with one that skips TLS certificate verification could look like this:
+
+```go
+ customClient := &http.Client{
+ Transport: &http.Transport{
+ TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+ },
+ }
+
+ client.InstallProtocol("https", githttp.NewClient(customClient))
+```
+
+Some internal implementations enable code reuse amongst the different transport implementations. Some of these may be made public in the future (e.g. `plumbing/transport/internal/common`).
+
+## Cache
+
+Several operations across `go-git` rely on caching objects in order to achieve optimal performance. The caching functionality is defined by the [cache.Object interface](plumbing/cache/common.go#L17).
+
+Two built-in implementations are `cache.ObjectLRU` and `cache.BufferLRU`. However, the caching functionality can be customized by implementing the `cache.Object` interface.
+
+## Hash
+
+`go-git` uses `crypto.Hash` to identify hash functions. The built-in implementations are `github.com/pjbgf/sha1cd` for SHA1 and Go's `crypto/sha256`.
+
+The default hash functions can be changed by calling `hash.RegisterHash`.
+```go
+ func init() {
+ hash.RegisterHash(crypto.SHA1, sha1.New)
+ }
+```
+
+New `SHA1` or `SHA256` hash functions that implement the standard library's `hash.Hash` interface can be registered by calling `hash.RegisterHash`.
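
To complement the Cache and Dot Git Storers sections above, here is a minimal sketch of wiring a bounded LRU object cache into the filesystem storer. The 64 MiB limit and the `/tmp/foo` paths are arbitrary, and the `cache.NewObjectLRU` constructor and `cache.MiByte` constant are assumed from `plumbing/cache`.

```go
package main

import (
	"github.com/go-git/go-billy/v5/osfs"
	git "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing/cache"
	"github.com/go-git/go-git/v5/storage/filesystem"
)

func main() {
	// Bound the object cache to 64 MiB instead of using the default size.
	objCache := cache.NewObjectLRU(64 * cache.MiByte)

	// Storer for the .git directory, with the worktree on the OS filesystem.
	storer := filesystem.NewStorage(osfs.New("/tmp/foo/.git"), objCache)
	wt := osfs.New("/tmp/foo")

	if _, err := git.Init(storer, wt); err != nil {
		panic(err)
	}
}
```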
diff --git a/Makefile b/Makefile
index d10922f..66adc8c 100644
--- a/Makefile
+++ b/Makefile
@@ -27,7 +27,13 @@ build-git:
test:
@echo "running against `git version`"; \
- $(GOTEST) ./...
+ $(GOTEST) -race ./...
+
+TEMP_REPO := $(shell mktemp)
+test-sha256:
+ $(GOCMD) run -tags sha256 _examples/sha256/main.go $(TEMP_REPO)
+ cd $(TEMP_REPO) && git fsck
+ rm -rf $(TEMP_REPO)
test-coverage:
@echo "running against `git version`"; \
@@ -35,4 +41,4 @@ test-coverage:
$(GOTEST) -coverprofile=$(COVERAGE_REPORT) -coverpkg=./... -covermode=$(COVERAGE_MODE) ./...
clean:
- rm -rf $(GIT_DIST_PATH) \ No newline at end of file
+ rm -rf $(GIT_DIST_PATH)
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..0d2f8d0
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,38 @@
+# go-git Security Policy
+
+The purpose of this security policy is to outline `go-git`'s process
+for reporting, handling and disclosing security sensitive information.
+
+## Supported Versions
+
+The project follows a version support policy where only the latest minor
+release is actively supported. Therefore, only issues that impact the latest
+minor release will be fixed. Users are encouraged to upgrade to the latest
+minor/patch release to benefit from the most up-to-date features, bug fixes,
+and security enhancements.
+
+The supported versions policy applies to both the `go-git` library and its
+associated repositories within the `go-git` org.
+
+## Reporting Security Issues
+
+Please report any security vulnerabilities or potential weaknesses in `go-git`
+privately via go-git-security@googlegroups.com. Do not publicly disclose the
+details of the vulnerability until a fix has been implemented and released.
+
+During this process the project maintainers will investigate the report, so please
+provide detailed information, including steps to reproduce, affected versions, and any known mitigations.
+
+The project maintainers will acknowledge the receipt of the report and work with
+the reporter to validate and address the issue.
+
+Please note that `go-git` does not have any bounty programs, and therefore does
+not provide financial compensation for disclosures.
+
+## Security Disclosure Process
+
+The project maintainers will make every effort to promptly address security issues.
+
+Once a security vulnerability is fixed, a security advisory will be published to notify users and provide appropriate mitigation measures.
+
+All `go-git` advisories can be found at https://github.com/go-git/go-git/security/advisories.
diff --git a/_examples/README.md b/_examples/README.md
index 3a4c539..46f1fb0 100644
--- a/_examples/README.md
+++ b/_examples/README.md
@@ -23,10 +23,13 @@ Here you can find a list of annotated _go-git_ examples:
- [remotes](remotes/main.go) - Working with remotes: adding, removing, etc.
- [progress](progress/main.go) - Printing the progress information from the sideband.
- [revision](revision/main.go) - Solve a revision into a commit.
-- [config](config/main.go) - Explains how to work with config files.
- [submodule](submodule/main.go) - Submodule update remote.
+- [azure devops](azure_devops/main.go) - Cloning Azure DevOps repositories.
+- [blame](blame/main.go) - Blame/annotate a commit.
+- [ls-remote](ls-remote/main.go) - List remote tags without cloning a repository.
### Advanced
- [custom_http](custom_http/main.go) - Replacing the HTTP client using a custom one.
- [clone with context](context/main.go) - Cloning a repository with graceful cancellation.
- [storage](storage/README.md) - Implementing a custom storage system.
+- [sha256](sha256/main.go) - Initializing and committing repositories that use sha256 as the object format.
diff --git a/_examples/azure_devops/main.go b/_examples/azure_devops/main.go
new file mode 100644
index 0000000..9c02ca0
--- /dev/null
+++ b/_examples/azure_devops/main.go
@@ -0,0 +1,56 @@
+package main
+
+import (
+ "fmt"
+ "os"
+
+ git "github.com/go-git/go-git/v5"
+ . "github.com/go-git/go-git/v5/_examples"
+ "github.com/go-git/go-git/v5/plumbing/protocol/packp/capability"
+ "github.com/go-git/go-git/v5/plumbing/transport"
+ "github.com/go-git/go-git/v5/plumbing/transport/http"
+)
+
+func main() {
+ CheckArgs("<url>", "<directory>", "<azuredevops_username>", "<azuredevops_password>")
+ url, directory, username, password := os.Args[1], os.Args[2], os.Args[3], os.Args[4]
+
+ // Clone the given repository to the given directory
+ Info("git clone %s %s", url, directory)
+
+ // Azure DevOps requires capabilities multi_ack / multi_ack_detailed,
+ // which are not fully implemented and by default are included in
+ // transport.UnsupportedCapabilities.
+ //
+ // The initial clone operation requires a full download of the repository,
+ // and therefore those unsupported capabilities are not as crucial, so
+ // removing them from that list allows the first clone to work
+ // successfully.
+ //
+ // Additional fetches will yield issues, so always work from a clean
+ // clone until those capabilities are fully supported.
+ //
+ // New commits and pushes against a remote worked without any issues.
+ transport.UnsupportedCapabilities = []capability.Capability{
+ capability.ThinPack,
+ }
+
+ r, err := git.PlainClone(directory, false, &git.CloneOptions{
+ Auth: &http.BasicAuth{
+ Username: username,
+ Password: password,
+ },
+ URL: url,
+ Progress: os.Stdout,
+ })
+ CheckIfError(err)
+
+ // ... retrieving the branch being pointed by HEAD
+ ref, err := r.Head()
+ CheckIfError(err)
+ // ... retrieving the commit object
+ commit, err := r.CommitObject(ref.Hash())
+ CheckIfError(err)
+
+ fmt.Println(commit)
+}
diff --git a/_examples/blame/main.go b/_examples/blame/main.go
new file mode 100644
index 0000000..3ffae17
--- /dev/null
+++ b/_examples/blame/main.go
@@ -0,0 +1,48 @@
+package main
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/go-git/go-git/v5"
+ . "github.com/go-git/go-git/v5/_examples"
+)
+
+// Basic example of how to blame a repository.
+func main() {
+ CheckArgs("<url>", "<file_to_blame>")
+ url := os.Args[1]
+ path := os.Args[2]
+
+ tmp, err := os.MkdirTemp("", "go-git-blame-*")
+ CheckIfError(err)
+
+ defer os.RemoveAll(tmp)
+
+ // Clone the given repository.
+ Info("git clone %s %s", url, tmp)
+ r, err := git.PlainClone(
+ tmp,
+ false,
+ &git.CloneOptions{
+ URL: url,
+ Tags: git.NoTags,
+ },
+ )
+ CheckIfError(err)
+
+ // Retrieve the branch's HEAD, to then get the HEAD commit.
+ ref, err := r.Head()
+ CheckIfError(err)
+
+ c, err := r.CommitObject(ref.Hash())
+ CheckIfError(err)
+
+ Info("git blame %s", path)
+
+ // Blame the given file/path.
+ br, err := git.Blame(c, path)
+ CheckIfError(err)
+
+ fmt.Printf("%s", br.String())
+}
diff --git a/_examples/clone/auth/ssh/main.go b/_examples/clone/auth/ssh/main.go
index 1e06e44..5f21d90 100644
--- a/_examples/clone/auth/ssh/main.go
+++ b/_examples/clone/auth/ssh/main.go
@@ -32,9 +32,6 @@ func main() {
}
r, err := git.PlainClone(directory, false, &git.CloneOptions{
- // The intended use of a GitHub personal access token is in replace of your password
- // because access tokens can easily be revoked.
- // https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/
Auth: publicKeys,
URL: url,
Progress: os.Stdout,
diff --git a/_examples/commit/main.go b/_examples/commit/main.go
index 4529c84..3f3c880 100644
--- a/_examples/commit/main.go
+++ b/_examples/commit/main.go
@@ -2,7 +2,6 @@ package main
import (
"fmt"
- "io/ioutil"
"os"
"path/filepath"
"time"
@@ -29,7 +28,7 @@ func main() {
// worktree of the project using the go standard library.
Info("echo \"hello world!\" > example-git-file")
filename := filepath.Join(directory, "example-git-file")
- err = ioutil.WriteFile(filename, []byte("hello world!"), 0644)
+ err = os.WriteFile(filename, []byte("hello world!"), 0644)
CheckIfError(err)
// Adds the new file to the staging area.
diff --git a/_examples/common_test.go b/_examples/common_test.go
index 9945c87..6630f15 100644
--- a/_examples/common_test.go
+++ b/_examples/common_test.go
@@ -3,7 +3,6 @@ package examples
import (
"flag"
"go/build"
- "io/ioutil"
"os"
"os/exec"
"path/filepath"
@@ -65,7 +64,7 @@ func TestExamples(t *testing.T) {
}
func tempFolder() string {
- path, err := ioutil.TempDir("", "")
+ path, err := os.MkdirTemp("", "")
CheckIfError(err)
tempFolders = append(tempFolders, path)
diff --git a/_examples/ls-remote/main.go b/_examples/ls-remote/main.go
index af038d6..e49e8c9 100644
--- a/_examples/ls-remote/main.go
+++ b/_examples/ls-remote/main.go
@@ -2,25 +2,35 @@ package main
import (
"log"
+ "os"
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/storage/memory"
+
+ . "github.com/go-git/go-git/v5/_examples"
)
// Retrieve remote tags without cloning repository
func main() {
+ CheckArgs("<url>")
+ url := os.Args[1]
+
+ Info("git ls-remote --tags %s", url)
// Create the remote with repository URL
rem := git.NewRemote(memory.NewStorage(), &config.RemoteConfig{
Name: "origin",
- URLs: []string{"https://github.com/Zenika/MARCEL"},
+ URLs: []string{url},
})
log.Print("Fetching tags...")
// We can then use every Remote functions to retrieve wanted information
- refs, err := rem.List(&git.ListOptions{})
+ refs, err := rem.List(&git.ListOptions{
+ // Returns all references, including peeled references.
+ PeelingOption: git.AppendPeeled,
+ })
if err != nil {
log.Fatal(err)
}
diff --git a/_examples/sha256/main.go b/_examples/sha256/main.go
new file mode 100644
index 0000000..e1772d2
--- /dev/null
+++ b/_examples/sha256/main.go
@@ -0,0 +1,65 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "time"
+
+ "github.com/go-git/go-git/v5"
+ . "github.com/go-git/go-git/v5/_examples"
+ "github.com/go-git/go-git/v5/plumbing/format/config"
+ "github.com/go-git/go-git/v5/plumbing/object"
+)
+
+// This example requires building with the sha256 tag for it to work:
+// go run -tags sha256 main.go /tmp/repository
+
+// Basic example of how to initialise a repository using sha256 as the hashing algorithm.
+func main() {
+ CheckArgs("<directory>")
+ directory := os.Args[1]
+
+ os.RemoveAll(directory)
+
+ // Init a new repository using the ObjectFormat SHA256.
+ r, err := git.PlainInitWithOptions(directory, &git.PlainInitOptions{ObjectFormat: config.SHA256})
+ CheckIfError(err)
+
+ w, err := r.Worktree()
+ CheckIfError(err)
+
+ // ... we need a file to commit so let's create a new file inside of the
+ // worktree of the project using the go standard library.
+ Info("echo \"hello world!\" > example-git-file")
+ filename := filepath.Join(directory, "example-git-file")
+ err = os.WriteFile(filename, []byte("hello world!"), 0644)
+ CheckIfError(err)
+
+ // Adds the new file to the staging area.
+ Info("git add example-git-file")
+ _, err = w.Add("example-git-file")
+ CheckIfError(err)
+
+ // Commits the current staging area to the repository, with the new file
+ // just created. We should provide the object.Signature of the Author of
+ // the commit. Since version 5.0.1, the Author signature can be omitted and
+ // will instead be read from the git config files.
+ Info("git commit -m \"example go-git commit\"")
+ commit, err := w.Commit("example go-git commit", &git.CommitOptions{
+ Author: &object.Signature{
+ Name: "John Doe",
+ Email: "john@doe.org",
+ When: time.Now(),
+ },
+ })
+
+ CheckIfError(err)
+
+ // Prints the current HEAD to verify that all worked well.
+ Info("git show -s")
+ obj, err := r.CommitObject(commit)
+ CheckIfError(err)
+
+ fmt.Println(obj)
+}
diff --git a/_examples/tag-create-push/main.go b/_examples/tag-create-push/main.go
index c443641..b820c76 100644
--- a/_examples/tag-create-push/main.go
+++ b/_examples/tag-create-push/main.go
@@ -2,7 +2,6 @@ package main
import (
"fmt"
- "io/ioutil"
"log"
"os"
@@ -67,7 +66,7 @@ func cloneRepo(url, dir, publicKeyPath string) (*git.Repository, error) {
func publicKey(filePath string) (*ssh.PublicKeys, error) {
var publicKey *ssh.PublicKeys
- sshKey, _ := ioutil.ReadFile(filePath)
+ sshKey, _ := os.ReadFile(filePath)
publicKey, err := ssh.NewPublicKeys("git", []byte(sshKey), "")
if err != nil {
return nil, err
diff --git a/blame.go b/blame.go
index 43634b3..2a877dc 100644
--- a/blame.go
+++ b/blame.go
@@ -2,16 +2,18 @@ package git
import (
"bytes"
+ "container/heap"
"errors"
"fmt"
+ "io"
"strconv"
- "strings"
"time"
"unicode/utf8"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/go-git/go-git/v5/utils/diff"
+ "github.com/sergi/go-diff/diffmatchpatch"
)
// BlameResult represents the result of a Blame operation.
@@ -29,67 +31,86 @@ type BlameResult struct {
func Blame(c *object.Commit, path string) (*BlameResult, error) {
// The file to blame is identified by the input arguments:
// commit and path. commit is a Commit object obtained from a Repository. Path
- // represents a path to a specific file contained into the repository.
+ // represents a path to a specific file contained in the repository.
//
- // Blaming a file is a two step process:
+ // Blaming a file is done by walking the commit history in reverse order, trying to find where each line was last modified.
//
- // 1. Create a linear history of the commits affecting a file. We use
- // revlist.New for that.
+ // When a diff is found it cannot immediately be assumed to have come from that commit, as it may have come from one of its
+ // parents, so the diffs are first resolved against the parents; if the change cannot be found in the
+ // parents, it is assigned to the commit itself.
//
- // 2. Then build a graph with a node for every line in every file in
- // the history of the file.
+ // When two parents have made the same change to a file, the parent that was merged
+ // into the current branch first is chosen (this is determined by the order of the parents inside the commit).
//
- // Each node is assigned a commit: Start by the nodes in the first
- // commit. Assign that commit as the creator of all its lines.
- //
- // Then jump to the nodes in the next commit, and calculate the diff
- // between the two files. Newly created lines get
- // assigned the new commit as its origin. Modified lines also get
- // this new commit. Untouched lines retain the old commit.
- //
- // All this work is done in the assignOrigin function which holds all
- // the internal relevant data in a "blame" struct, that is not
- // exported.
- //
- // TODO: ways to improve the efficiency of this function:
- // 1. Improve revlist
- // 2. Improve how to traverse the history (example a backward traversal will
- // be much more efficient)
- //
- // TODO: ways to improve the function in general:
- // 1. Add memoization between revlist and assign.
- // 2. It is using much more memory than needed, see the TODOs below.
+ // This currently works on a line-by-line basis; if performance becomes an issue it could be changed to work with
+ // hunks rather than lines, splitting diff hunks where necessary.
b := new(blame)
b.fRev = c
b.path = path
+ b.q = new(priorityQueue)
- // get all the file revisions
- if err := b.fillRevs(); err != nil {
+ file, err := b.fRev.File(path)
+ if err != nil {
return nil, err
}
-
- // calculate the line tracking graph and fill in
- // file contents in data.
- if err := b.fillGraphAndData(); err != nil {
+ finalLines, err := file.Lines()
+ if err != nil {
return nil, err
}
+ finalLength := len(finalLines)
- file, err := b.fRev.File(b.path)
+ needsMap := make([]lineMap, finalLength)
+ for i := range needsMap {
+ needsMap[i] = lineMap{i, i, nil, -1}
+ }
+ contents, err := file.Contents()
if err != nil {
return nil, err
}
- finalLines, err := file.Lines()
+ b.q.Push(&queueItem{
+ nil,
+ nil,
+ c,
+ path,
+ contents,
+ needsMap,
+ 0,
+ false,
+ 0,
+ })
+ items := make([]*queueItem, 0)
+ for {
+ items = items[:0]
+ for {
+ if b.q.Len() == 0 {
+ return nil, errors.New("invalid state: no items left on the blame queue")
+ }
+ item := b.q.Pop()
+ items = append(items, item)
+ next := b.q.Peek()
+ if next == nil || next.Hash != item.Commit.Hash {
+ break
+ }
+ }
+ finished, err := b.addBlames(items)
+ if err != nil {
+ return nil, err
+ }
+ if finished {
+ break
+ }
+ }
if err != nil {
return nil, err
}
- // Each node (line) holds the commit where it was introduced or
- // last modified. To achieve that we use the FORWARD algorithm
- // described in Zimmermann, et al. "Mining Version Archives for
- // Co-changed Lines", in proceedings of the Mining Software
- // Repositories workshop, Shanghai, May 22-23, 2006.
- lines, err := newLines(finalLines, b.sliceGraph(len(b.graph)-1))
+ b.lineToCommit = make([]*object.Commit, finalLength)
+ for i := range needsMap {
+ b.lineToCommit[i] = needsMap[i].Commit
+ }
+
+ lines, err := newLines(finalLines, b.lineToCommit)
if err != nil {
return nil, err
}
@@ -105,6 +126,8 @@ func Blame(c *object.Commit, path string) (*BlameResult, error) {
type Line struct {
// Author is the email address of the last author that modified the line.
Author string
+ // AuthorName is the name of the last author that modified the line.
+ AuthorName string
// Text is the original text of the line.
Text string
// Date is when the original text of the line was introduced
@@ -113,31 +136,21 @@ type Line struct {
Hash plumbing.Hash
}
-func newLine(author, text string, date time.Time, hash plumbing.Hash) *Line {
+func newLine(author, authorName, text string, date time.Time, hash plumbing.Hash) *Line {
return &Line{
- Author: author,
- Text: text,
- Hash: hash,
- Date: date,
+ Author: author,
+ AuthorName: authorName,
+ Text: text,
+ Hash: hash,
+ Date: date,
}
}
func newLines(contents []string, commits []*object.Commit) ([]*Line, error) {
- lcontents := len(contents)
- lcommits := len(commits)
-
- if lcontents != lcommits {
- if lcontents == lcommits-1 && contents[lcontents-1] != "\n" {
- contents = append(contents, "\n")
- } else {
- return nil, errors.New("contents and commits have different length")
- }
- }
-
- result := make([]*Line, 0, lcontents)
+ result := make([]*Line, 0, len(contents))
for i := range contents {
result = append(result, newLine(
- commits[i].Author.Email, contents[i],
+ commits[i].Author.Email, commits[i].Author.Name, contents[i],
commits[i].Author.When, commits[i].Hash,
))
}
@@ -152,151 +165,426 @@ type blame struct {
path string
// the commit of the final revision of the file to blame
fRev *object.Commit
- // the chain of revisions affecting the the file to blame
- revs []*object.Commit
- // the contents of the file across all its revisions
- data []string
- // the graph of the lines in the file across all the revisions
- graph [][]*object.Commit
+ // resolved lines
+ lineToCommit []*object.Commit
+ // queue of commits that need resolving
+ q *priorityQueue
}
-// calculate the history of a file "path", starting from commit "from", sorted by commit date.
-func (b *blame) fillRevs() error {
- var err error
-
- b.revs, err = references(b.fRev, b.path)
- return err
+type lineMap struct {
+ Orig, Cur int
+ Commit *object.Commit
+ FromParentNo int
}
-// build graph of a file from its revision history
-func (b *blame) fillGraphAndData() error {
- //TODO: not all commits are needed, only the current rev and the prev
- b.graph = make([][]*object.Commit, len(b.revs))
- b.data = make([]string, len(b.revs)) // file contents in all the revisions
- // for every revision of the file, starting with the first
- // one...
- for i, rev := range b.revs {
+func (b *blame) addBlames(curItems []*queueItem) (bool, error) {
+ curItem := curItems[0]
+
+ // Simple optimisation to merge paths; there is potential to go a bit further here and check for any duplicates,
+ // not only the case where they are all the same.
+ if len(curItems) == 1 {
+ curItems = nil
+ } else if curItem.IdenticalToChild {
+ allSame := true
+ lenCurItems := len(curItems)
+ lowestParentNo := curItem.ParentNo
+ for i := 1; i < lenCurItems; i++ {
+ if !curItems[i].IdenticalToChild || curItem.Child != curItems[i].Child {
+ allSame = false
+ break
+ }
+ lowestParentNo = min(lowestParentNo, curItems[i].ParentNo)
+ }
+ if allSame {
+ curItem.Child.numParentsNeedResolving = curItem.Child.numParentsNeedResolving - lenCurItems + 1
+ curItems = nil // free the memory
+ curItem.ParentNo = lowestParentNo
+
+ // Now check if we can remove the parent completely
+ for curItem.Child.IdenticalToChild && curItem.Child.MergedChildren == nil && curItem.Child.numParentsNeedResolving == 1 {
+ oldChild := curItem.Child
+ curItem.Child = oldChild.Child
+ curItem.ParentNo = oldChild.ParentNo
+ }
+ }
+ }
+
+ // if we have more than 1 item for this commit, create a single needsMap
+ if len(curItems) > 1 {
+ curItem.MergedChildren = make([]childToNeedsMap, len(curItems))
+ for i, c := range curItems {
+ curItem.MergedChildren[i] = childToNeedsMap{c.Child, c.NeedsMap, c.IdenticalToChild, c.ParentNo}
+ }
+ newNeedsMap := make([]lineMap, 0, len(curItem.NeedsMap))
+ newNeedsMap = append(newNeedsMap, curItems[0].NeedsMap...)
+
+ for i := 1; i < len(curItems); i++ {
+ cur := curItems[i].NeedsMap
+ n := 0 // position in newNeedsMap
+ c := 0 // position in current list
+ for c < len(cur) {
+ if n == len(newNeedsMap) {
+ newNeedsMap = append(newNeedsMap, cur[c:]...)
+ break
+ } else if newNeedsMap[n].Cur == cur[c].Cur {
+ n++
+ c++
+ } else if newNeedsMap[n].Cur < cur[c].Cur {
+ n++
+ } else {
+ newNeedsMap = append(newNeedsMap, cur[c])
+ newPos := len(newNeedsMap) - 1
+ for newPos > n {
+ newNeedsMap[newPos-1], newNeedsMap[newPos] = newNeedsMap[newPos], newNeedsMap[newPos-1]
+ newPos--
+ }
+ }
+ }
+ }
+ curItem.NeedsMap = newNeedsMap
+ curItem.IdenticalToChild = false
+ curItem.Child = nil
+ curItems = nil // free the memory
+ }
+
+ parents, err := parentsContainingPath(curItem.path, curItem.Commit)
+ if err != nil {
+ return false, err
+ }
+
+ anyPushed := false
+ for parentNo, prev := range parents {
+ currentHash, err := blobHash(curItem.path, curItem.Commit)
+ if err != nil {
+ return false, err
+ }
+ prevHash, err := blobHash(prev.Path, prev.Commit)
+ if err != nil {
+ return false, err
+ }
+ if currentHash == prevHash {
+ if len(parents) == 1 && curItem.MergedChildren == nil && curItem.IdenticalToChild {
+ // commit that has 1 parent and 1 child and is the same as both, bypass it completely
+ b.q.Push(&queueItem{
+ Child: curItem.Child,
+ Commit: prev.Commit,
+ path: prev.Path,
+ Contents: curItem.Contents,
+ NeedsMap: curItem.NeedsMap, // reuse the NeedsMap as we are throwing away this item
+ IdenticalToChild: true,
+ ParentNo: curItem.ParentNo,
+ })
+ } else {
+ b.q.Push(&queueItem{
+ Child: curItem,
+ Commit: prev.Commit,
+ path: prev.Path,
+ Contents: curItem.Contents,
+ NeedsMap: append([]lineMap(nil), curItem.NeedsMap...), // create new slice and copy
+ IdenticalToChild: true,
+ ParentNo: parentNo,
+ })
+ curItem.numParentsNeedResolving++
+ }
+ anyPushed = true
+ continue
+ }
+
// get the contents of the file
- file, err := rev.File(b.path)
+ file, err := prev.Commit.File(prev.Path)
if err != nil {
- return nil
+ return false, err
}
- b.data[i], err = file.Contents()
+ prevContents, err := file.Contents()
if err != nil {
- return err
+ return false, err
}
- nLines := countLines(b.data[i])
- // create a node for each line
- b.graph[i] = make([]*object.Commit, nLines)
- // assign a commit to each node
- // if this is the first revision, then the node is assigned to
- // this first commit.
- if i == 0 {
- for j := 0; j < nLines; j++ {
- b.graph[i][j] = b.revs[i]
+
+ hunks := diff.Do(prevContents, curItem.Contents)
+ prevl := -1
+ curl := -1
+ need := 0
+ getFromParent := make([]lineMap, 0)
+ out:
+ for h := range hunks {
+ hLines := countLines(hunks[h].Text)
+ for hl := 0; hl < hLines; hl++ {
+ switch {
+ case hunks[h].Type == diffmatchpatch.DiffEqual:
+ prevl++
+ curl++
+ if curl == curItem.NeedsMap[need].Cur {
+ // add to needs
+ getFromParent = append(getFromParent, lineMap{curl, prevl, nil, -1})
+ // move to next need
+ need++
+ if need >= len(curItem.NeedsMap) {
+ break out
+ }
+ }
+ case hunks[h].Type == diffmatchpatch.DiffInsert:
+ curl++
+ if curl == curItem.NeedsMap[need].Cur {
+ // the line we want is added, it may have been added here (or by another parent), skip it for now
+ need++
+ if need >= len(curItem.NeedsMap) {
+ break out
+ }
+ }
+ case hunks[h].Type == diffmatchpatch.DiffDelete:
+ prevl += hLines
+ continue out
+ default:
+ return false, errors.New("invalid state: invalid hunk Type")
+ }
}
- } else {
- // if this is not the first commit, then assign to the old
- // commit or to the new one, depending on what the diff
- // says.
- b.assignOrigin(i, i-1)
+ }
+
+ if len(getFromParent) > 0 {
+ b.q.Push(&queueItem{
+ curItem,
+ nil,
+ prev.Commit,
+ prev.Path,
+ prevContents,
+ getFromParent,
+ 0,
+ false,
+ parentNo,
+ })
+ curItem.numParentsNeedResolving++
+ anyPushed = true
}
}
- return nil
-}
-// sliceGraph returns a slice of commits (one per line) for a particular
-// revision of a file (0=first revision).
-func (b *blame) sliceGraph(i int) []*object.Commit {
- fVs := b.graph[i]
- result := make([]*object.Commit, 0, len(fVs))
- for _, v := range fVs {
- c := *v
- result = append(result, &c)
+ curItem.Contents = "" // no longer need, free the memory
+
+ if !anyPushed {
+ return finishNeeds(curItem)
}
- return result
+
+ return false, nil
}
-// Assigns origin to vertexes in current (c) rev from data in its previous (p)
-// revision
-func (b *blame) assignOrigin(c, p int) {
- // assign origin based on diff info
- hunks := diff.Do(b.data[p], b.data[c])
- sl := -1 // source line
- dl := -1 // destination line
- for h := range hunks {
- hLines := countLines(hunks[h].Text)
- for hl := 0; hl < hLines; hl++ {
- switch {
- case hunks[h].Type == 0:
- sl++
- dl++
- b.graph[c][dl] = b.graph[p][sl]
- case hunks[h].Type == 1:
- dl++
- b.graph[c][dl] = b.revs[c]
- case hunks[h].Type == -1:
- sl++
- default:
- panic("unreachable")
+func finishNeeds(curItem *queueItem) (bool, error) {
+ // any needs left in the needsMap must have come from this revision
+ for i := range curItem.NeedsMap {
+ if curItem.NeedsMap[i].Commit == nil {
+ curItem.NeedsMap[i].Commit = curItem.Commit
+ curItem.NeedsMap[i].FromParentNo = -1
+ }
+ }
+
+ if curItem.Child == nil && curItem.MergedChildren == nil {
+ return true, nil
+ }
+
+ if curItem.MergedChildren == nil {
+ return applyNeeds(curItem.Child, curItem.NeedsMap, curItem.IdenticalToChild, curItem.ParentNo)
+ }
+
+ for _, ctn := range curItem.MergedChildren {
+ m := 0 // position in merged needs map
+ p := 0 // position in parent needs map
+ for p < len(ctn.NeedsMap) {
+ if ctn.NeedsMap[p].Cur == curItem.NeedsMap[m].Cur {
+ ctn.NeedsMap[p].Commit = curItem.NeedsMap[m].Commit
+ m++
+ p++
+ } else if ctn.NeedsMap[p].Cur < curItem.NeedsMap[m].Cur {
+ p++
+ } else {
+ m++
}
}
+ finished, err := applyNeeds(ctn.Child, ctn.NeedsMap, ctn.IdenticalToChild, ctn.ParentNo)
+ if finished || err != nil {
+ return finished, err
+ }
}
-}
-// GoString prints the results of a Blame using git-blame's style.
-func (b *blame) GoString() string {
- var buf bytes.Buffer
+ return false, nil
+}
- file, err := b.fRev.File(b.path)
- if err != nil {
- panic("PrettyPrint: internal error in repo.Data")
+func applyNeeds(child *queueItem, needsMap []lineMap, identicalToChild bool, parentNo int) (bool, error) {
+ if identicalToChild {
+ for i := range child.NeedsMap {
+ l := &child.NeedsMap[i]
+ if l.Cur != needsMap[i].Cur || l.Orig != needsMap[i].Orig {
+ return false, errors.New("needsMap isn't the same? Why not??")
+ }
+ if l.Commit == nil || parentNo < l.FromParentNo {
+ l.Commit = needsMap[i].Commit
+ l.FromParentNo = parentNo
+ }
+ }
+ } else {
+ i := 0
+ out:
+ for j := range child.NeedsMap {
+ l := &child.NeedsMap[j]
+ for needsMap[i].Orig < l.Cur {
+ i++
+ if i == len(needsMap) {
+ break out
+ }
+ }
+ if l.Cur == needsMap[i].Orig {
+ if l.Commit == nil || parentNo < l.FromParentNo {
+ l.Commit = needsMap[i].Commit
+ l.FromParentNo = parentNo
+ }
+ }
+ }
}
- contents, err := file.Contents()
- if err != nil {
- panic("PrettyPrint: internal error in repo.Data")
+ child.numParentsNeedResolving--
+ if child.numParentsNeedResolving == 0 {
+ finished, err := finishNeeds(child)
+ if finished || err != nil {
+ return finished, err
+ }
}
- lines := strings.Split(contents, "\n")
+ return false, nil
+}
+
+// String prints the results of a Blame using git-blame's style.
+func (b BlameResult) String() string {
+ var buf bytes.Buffer
+
// max line number length
- mlnl := len(strconv.Itoa(len(lines)))
+ mlnl := len(strconv.Itoa(len(b.Lines)))
// max author length
mal := b.maxAuthorLength()
- format := fmt.Sprintf("%%s (%%-%ds %%%dd) %%s\n",
- mal, mlnl)
+ format := fmt.Sprintf("%%s (%%-%ds %%s %%%dd) %%s\n", mal, mlnl)
- fVs := b.graph[len(b.graph)-1]
- for ln, v := range fVs {
- fmt.Fprintf(&buf, format, v.Hash.String()[:8],
- prettyPrintAuthor(fVs[ln]), ln+1, lines[ln])
+ for ln := range b.Lines {
+ _, _ = fmt.Fprintf(&buf, format, b.Lines[ln].Hash.String()[:8],
+ b.Lines[ln].AuthorName, b.Lines[ln].Date.Format("2006-01-02 15:04:05 -0700"), ln+1, b.Lines[ln].Text)
}
return buf.String()
}
-// utility function to pretty print the author.
-func prettyPrintAuthor(c *object.Commit) string {
- return fmt.Sprintf("%s %s", c.Author.Name, c.Author.When.Format("2006-01-02"))
-}
-
// utility function to calculate the number of runes needed
// to print the longest author name in the blame of a file.
-func (b *blame) maxAuthorLength() int {
- memo := make(map[plumbing.Hash]struct{}, len(b.graph)-1)
- fVs := b.graph[len(b.graph)-1]
+func (b BlameResult) maxAuthorLength() int {
m := 0
- for ln := range fVs {
- if _, ok := memo[fVs[ln].Hash]; ok {
- continue
- }
- memo[fVs[ln].Hash] = struct{}{}
- m = max(m, utf8.RuneCountInString(prettyPrintAuthor(fVs[ln])))
+ for ln := range b.Lines {
+ m = max(m, utf8.RuneCountInString(b.Lines[ln].AuthorName))
}
return m
}
+func min(a, b int) int {
+ if a < b {
+ return a
+ }
+ return b
+}
+
func max(a, b int) int {
if a > b {
return a
}
return b
}
+
+type childToNeedsMap struct {
+ Child *queueItem
+ NeedsMap []lineMap
+ IdenticalToChild bool
+ ParentNo int
+}
+
+type queueItem struct {
+ Child *queueItem
+ MergedChildren []childToNeedsMap
+ Commit *object.Commit
+ path string
+ Contents string
+ NeedsMap []lineMap
+ numParentsNeedResolving int
+ IdenticalToChild bool
+ ParentNo int
+}
+
+type priorityQueueImp []*queueItem
+
+func (pq *priorityQueueImp) Len() int { return len(*pq) }
+func (pq *priorityQueueImp) Less(i, j int) bool {
+ return !(*pq)[i].Commit.Less((*pq)[j].Commit)
+}
+func (pq *priorityQueueImp) Swap(i, j int) { (*pq)[i], (*pq)[j] = (*pq)[j], (*pq)[i] }
+func (pq *priorityQueueImp) Push(x any) { *pq = append(*pq, x.(*queueItem)) }
+func (pq *priorityQueueImp) Pop() any {
+ n := len(*pq)
+ ret := (*pq)[n-1]
+ (*pq)[n-1] = nil // avoid memory leak
+ *pq = (*pq)[0 : n-1]
+
+ return ret
+}
+func (pq *priorityQueueImp) Peek() *object.Commit {
+ if len(*pq) == 0 {
+ return nil
+ }
+ return (*pq)[0].Commit
+}
+
+type priorityQueue priorityQueueImp
+
+func (pq *priorityQueue) Init() { heap.Init((*priorityQueueImp)(pq)) }
+func (pq *priorityQueue) Len() int { return (*priorityQueueImp)(pq).Len() }
+func (pq *priorityQueue) Push(c *queueItem) {
+ heap.Push((*priorityQueueImp)(pq), c)
+}
+func (pq *priorityQueue) Pop() *queueItem {
+ return heap.Pop((*priorityQueueImp)(pq)).(*queueItem)
+}
+func (pq *priorityQueue) Peek() *object.Commit { return (*priorityQueueImp)(pq).Peek() }
+
+type parentCommit struct {
+ Commit *object.Commit
+ Path string
+}
+
+func parentsContainingPath(path string, c *object.Commit) ([]parentCommit, error) {
+ // TODO: benchmark this method making git.object.Commit.parent public instead of using
+ // an iterator
+ var result []parentCommit
+ iter := c.Parents()
+ for {
+ parent, err := iter.Next()
+ if err == io.EOF {
+ return result, nil
+ }
+ if err != nil {
+ return nil, err
+ }
+ if _, err := parent.File(path); err == nil {
+ result = append(result, parentCommit{parent, path})
+ } else {
+ // look for renames
+ patch, err := parent.Patch(c)
+ if err != nil {
+ return nil, err
+ } else if patch != nil {
+ for _, fp := range patch.FilePatches() {
+ from, to := fp.Files()
+ if from != nil && to != nil && to.Path() == path {
+ result = append(result, parentCommit{parent, from.Path()})
+ break
+ }
+ }
+ }
+ }
+ }
+}
+
+func blobHash(path string, commit *object.Commit) (plumbing.Hash, error) {
+ file, err := commit.File(path)
+ if err != nil {
+ return plumbing.ZeroHash, err
+ }
+ return file.Hash, nil
+}
diff --git a/blame_test.go b/blame_test.go
index 7895b66..1c5db26 100644
--- a/blame_test.go
+++ b/blame_test.go
@@ -28,7 +28,7 @@ func (s *BlameSuite) TestNewLines(c *C) {
}
func (s *BlameSuite) TestNewLinesWithNewLine(c *C) {
- lines, err := newLines([]string{"foo"}, []*object.Commit{
+ lines, err := newLines([]string{"foo", ""}, []*object.Commit{
{Message: "foo"},
{Message: "bar"},
})
@@ -36,7 +36,7 @@ func (s *BlameSuite) TestNewLinesWithNewLine(c *C) {
c.Assert(err, IsNil)
c.Assert(lines, HasLen, 2)
c.Assert(lines[0].Text, Equals, "foo")
- c.Assert(lines[1].Text, Equals, "\n")
+ c.Assert(lines[1].Text, Equals, "")
}
type blameTest struct {
@@ -81,10 +81,11 @@ func (s *BlameSuite) mockBlame(c *C, t blameTest, r *Repository) (blame *BlameRe
commit, err := r.CommitObject(plumbing.NewHash(t.blames[i]))
c.Assert(err, IsNil)
l := &Line{
- Author: commit.Author.Email,
- Text: lines[i],
- Date: commit.Author.When,
- Hash: commit.Hash,
+ Author: commit.Author.Email,
+ AuthorName: commit.Author.Name,
+ Text: lines[i],
+ Date: commit.Author.When,
+ Hash: commit.Hash,
}
blamedLines = append(blamedLines, l)
}
@@ -146,7 +147,11 @@ var blameTests = [...]blameTest{
repeat("6ecf0ef2c2dffb796033e5a02219af86ec6584e5", 7),
)},
/*
- // Failed
+ // This fails due to the different diff tool being used to create the patches.
+ // For example in commit d4b48a39aba7d3bd3e8abef2274a95b112d1ae73 when "function echo_status()" is added:
+ // - 'git diff' adds the new "}\n\n" to the end of the function and keeps the preceding "}\n\n" blamed to the previous commit
+ // - our diff adds the new "}\n\n" before the function and reuses the existing "}\n\n" to close the new function
+ // The resulting file is the same, but it causes the blame not to match.
{"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "InstallSpinnaker.sh", concat(
repeat("ce9f123d790717599aaeb76bc62510de437761be", 2),
repeat("a47d0aaeda421f06df248ad65bd58230766bf118", 1),
@@ -341,7 +346,9 @@ var blameTests = [...]blameTest{
repeat("a24001f6938d425d0e7504bdf5d27fc866a85c3d", 185),
)},
/*
- // Fail by 3
+ // This fails due to the different diff tool being used to create the patches.
+ // For commit c89dab0d42f1856d157357e9010f8cc6a12f5b1f our diff tool keeps an existing newline as moved in the file, whereas
+ // 'git diff' says the existing newline was deleted and a new one created.
{"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "pylib/spinnaker/configurator.py", concat(
repeat("a24001f6938d425d0e7504bdf5d27fc866a85c3d", 53),
repeat("c89dab0d42f1856d157357e9010f8cc6a12f5b1f", 1),
@@ -423,65 +430,63 @@ var blameTests = [...]blameTest{
repeat("637ba49300f701cfbd859c1ccf13c4f39a9ba1c8", 1),
repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 13),
)},
+ {"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "config/default-spinnaker-local.yml", concat(
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 9),
+ repeat("5e09821cbd7d710405b61cab0a795c2982a71b9c", 2),
+ repeat("99534ecc895fe17a1d562bb3049d4168a04d0865", 1),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 2),
+ repeat("a596972a661d9a7deca8abd18b52ce1a39516e89", 1),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 5),
+ repeat("5e09821cbd7d710405b61cab0a795c2982a71b9c", 2),
+ repeat("a596972a661d9a7deca8abd18b52ce1a39516e89", 1),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 5),
+ repeat("5e09821cbd7d710405b61cab0a795c2982a71b9c", 1),
+ repeat("8980daf661408a3faa1f22c225702a5c1d11d5c9", 1),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 25),
+ repeat("caf6d62e8285d4681514dd8027356fb019bc97ff", 1),
+ repeat("eaf7614cad81e8ab5c813dd4821129d0c04ea449", 1),
+ repeat("caf6d62e8285d4681514dd8027356fb019bc97ff", 1),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 24),
+ repeat("974b775a8978b120ff710cac93a21c7387b914c9", 2),
+ repeat("3ce7b902a51bac2f10994f7d1f251b616c975e54", 1),
+ repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 6),
+ repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 14),
+ repeat("7c8d9a6081d9cb7a56c479bfe64d70540ea32795", 5),
+ repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 2),
+ )},
+ {"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "config/spinnaker.yml", concat(
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 32),
+ repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 2),
+ repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 1),
+ repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 6),
+ repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 2),
+ repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 2),
+ repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 2),
+ repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 3),
+ repeat("7c8d9a6081d9cb7a56c479bfe64d70540ea32795", 3),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 50),
+ repeat("974b775a8978b120ff710cac93a21c7387b914c9", 2),
+ repeat("d4553dac205023fa77652308af1a2d1cf52138fb", 1),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 9),
+ repeat("caf6d62e8285d4681514dd8027356fb019bc97ff", 1),
+ repeat("eaf7614cad81e8ab5c813dd4821129d0c04ea449", 1),
+ repeat("caf6d62e8285d4681514dd8027356fb019bc97ff", 1),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 39),
+ repeat("079e42e7c979541b6fab7343838f7b9fd4a360cd", 6),
+ repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 15),
+ )},
/*
- // fail a few lines
- {"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "config/default-spinnaker-local.yml", concat(
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 9),
- repeat("5e09821cbd7d710405b61cab0a795c2982a71b9c", 2),
- repeat("99534ecc895fe17a1d562bb3049d4168a04d0865", 1),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 2),
- repeat("a596972a661d9a7deca8abd18b52ce1a39516e89", 1),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 5),
- repeat("5e09821cbd7d710405b61cab0a795c2982a71b9c", 2),
- repeat("a596972a661d9a7deca8abd18b52ce1a39516e89", 1),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 5),
- repeat("5e09821cbd7d710405b61cab0a795c2982a71b9c", 1),
- repeat("8980daf661408a3faa1f22c225702a5c1d11d5c9", 1),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 25),
- repeat("caf6d62e8285d4681514dd8027356fb019bc97ff", 1),
- repeat("eaf7614cad81e8ab5c813dd4821129d0c04ea449", 1),
- repeat("caf6d62e8285d4681514dd8027356fb019bc97ff", 1),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 24),
- repeat("974b775a8978b120ff710cac93a21c7387b914c9", 2),
- repeat("3ce7b902a51bac2f10994f7d1f251b616c975e54", 1),
- repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 6),
- repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 14),
- repeat("7c8d9a6081d9cb7a56c479bfe64d70540ea32795", 5),
- repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 2),
- )},
- */
- /*
- // fail one line
- {"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "config/spinnaker.yml", concat(
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 32),
- repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 2),
- repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 1),
- repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 6),
- repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 2),
- repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 2),
- repeat("5a2a845bc08974a36d599a4a4b7e25be833823b0", 2),
- repeat("41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c", 3),
- repeat("7c8d9a6081d9cb7a56c479bfe64d70540ea32795", 3),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 50),
- repeat("974b775a8978b120ff710cac93a21c7387b914c9", 2),
- repeat("d4553dac205023fa77652308af1a2d1cf52138fb", 1),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 9),
- repeat("caf6d62e8285d4681514dd8027356fb019bc97ff", 1),
- repeat("eaf7614cad81e8ab5c813dd4821129d0c04ea449", 1),
- repeat("caf6d62e8285d4681514dd8027356fb019bc97ff", 1),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 39),
- repeat("079e42e7c979541b6fab7343838f7b9fd4a360cd", 6),
- repeat("ae904e8d60228c21c47368f6a10f1cc9ca3aeebf", 15),
- )},
- */
- /*
+ // This fails due to the different diff tool being used to create the patches.
+ // For commit d1ff4e13e9e0b500821aa558373878f93487e34b our diff tool keeps an existing newline as moved in the file, whereas
+ // 'git diff' says the existing newline was deleted and a new one created.
{"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "dev/install_development.sh", concat(
repeat("99534ecc895fe17a1d562bb3049d4168a04d0865", 1),
repeat("d1ff4e13e9e0b500821aa558373878f93487e34b", 71),
)},
*/
/*
- // FAIL two lines interchanged
+ // This fails due to the different diff tool being used to create the patches.
+ // For commit 838aed816872c52ed435e4876a7b64dba0bed500 the diff tools assign the "fi\n" to different line numbers.
{"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "dev/bootstrap_dev.sh", concat(
repeat("a24001f6938d425d0e7504bdf5d27fc866a85c3d", 95),
repeat("838aed816872c52ed435e4876a7b64dba0bed500", 1),
@@ -542,10 +547,7 @@ var blameTests = [...]blameTest{
repeat("838aed816872c52ed435e4876a7b64dba0bed500", 8),
)},
*/
- /*
- // FAIL move?
- {"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "dev/create_google_dev_vm.sh", concat(
- repeat("a24001f6938d425d0e7504bdf5d27fc866a85c3d", 20),
- )},
- */
+ {"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "dev/create_google_dev_vm.sh", concat(
+ repeat("a24001f6938d425d0e7504bdf5d27fc866a85c3d", 20),
+ )},
}
diff --git a/common_test.go b/common_test.go
index c0c4009..ff4d6b8 100644
--- a/common_test.go
+++ b/common_test.go
@@ -3,10 +3,12 @@ package git
import (
"os"
"testing"
+ "time"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/cache"
"github.com/go-git/go-git/v5/plumbing/format/packfile"
+ "github.com/go-git/go-git/v5/plumbing/object"
"github.com/go-git/go-git/v5/storage/filesystem"
"github.com/go-git/go-git/v5/storage/memory"
@@ -37,7 +39,7 @@ func (s *BaseSuite) TearDownSuite(c *C) {
s.Suite.TearDownSuite(c)
}
-func (s *BaseSuite) buildBasicRepository(c *C) {
+func (s *BaseSuite) buildBasicRepository(_ *C) {
f := fixtures.Basic().One()
s.Repository = s.NewRepository(f)
}
@@ -105,13 +107,16 @@ func (s *BaseSuite) NewRepositoryFromPackfile(f *fixtures.Fixture) *Repository {
storer := memory.NewStorage()
p := f.Packfile()
- defer p.Close()
+ defer func() { _ = p.Close() }()
if err := packfile.UpdateObjectStorage(storer, p); err != nil {
panic(err)
}
- storer.SetReference(plumbing.NewHashReference(plumbing.HEAD, plumbing.NewHash(f.Head)))
+ err := storer.SetReference(plumbing.NewHashReference(plumbing.HEAD, plumbing.NewHash(f.Head)))
+ if err != nil {
+ panic(err)
+ }
r, err := Open(storer, memfs.New())
if err != nil {
@@ -133,14 +138,37 @@ func (s *BaseSuite) GetLocalRepositoryURL(f *fixtures.Fixture) string {
func (s *BaseSuite) TemporalDir() (path string, clean func()) {
fs := osfs.New(os.TempDir())
- path, err := util.TempDir(fs, "", "")
+ relPath, err := util.TempDir(fs, "", "")
+ if err != nil {
+ panic(err)
+ }
+
+ path = fs.Join(fs.Root(), relPath)
+ clean = func() {
+ _ = util.RemoveAll(fs, relPath)
+ }
+
+ return
+}
+
+func (s *BaseSuite) TemporalHomeDir() (path string, clean func()) {
+ home, err := os.UserHomeDir()
+ if err != nil {
+ panic(err)
+ }
+
+ fs := osfs.New(home)
+ relPath, err := util.TempDir(fs, "", "")
if err != nil {
panic(err)
}
- return fs.Join(fs.Root(), path), func() {
- util.RemoveAll(fs, path)
+ path = fs.Join(fs.Root(), relPath)
+ clean = func() {
+ _ = util.RemoveAll(fs, relPath)
}
+
+ return
}
func (s *BaseSuite) TemporalFilesystem() (fs billy.Filesystem, clean func()) {
@@ -155,9 +183,11 @@ func (s *BaseSuite) TemporalFilesystem() (fs billy.Filesystem, clean func()) {
panic(err)
}
- return fs, func() {
- util.RemoveAll(fs, path)
+ clean = func() {
+ _ = util.RemoveAll(fs, path)
}
+
+ return
}
type SuiteCommon struct{}
@@ -204,3 +234,36 @@ func AssertReferencesMissing(c *C, r *Repository, expected []string) {
c.Assert(err, Equals, plumbing.ErrReferenceNotFound)
}
}
+
+func CommitNewFile(c *C, repo *Repository, fileName string) plumbing.Hash {
+ wt, err := repo.Worktree()
+ c.Assert(err, IsNil)
+
+ fd, err := wt.Filesystem.Create(fileName)
+ c.Assert(err, IsNil)
+
+ _, err = fd.Write([]byte("# test file"))
+ c.Assert(err, IsNil)
+
+ err = fd.Close()
+ c.Assert(err, IsNil)
+
+ _, err = wt.Add(fileName)
+ c.Assert(err, IsNil)
+
+ sha, err := wt.Commit("test commit", &CommitOptions{
+ Author: &object.Signature{
+ Name: "test",
+ Email: "test@example.com",
+ When: time.Now(),
+ },
+ Committer: &object.Signature{
+ Name: "test",
+ Email: "test@example.com",
+ When: time.Now(),
+ },
+ })
+ c.Assert(err, IsNil)
+
+ return sha
+}
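A rough sketch of how a test could call the new CommitNewFile helper, assuming it lives alongside the helpers above in common_test.go (the test and file names are illustrative):

func (s *SuiteCommon) TestCommitNewFileSketch(c *C) {
	// In-memory repository; no filesystem involved.
	repo, err := Init(memory.NewStorage(), memfs.New())
	c.Assert(err, IsNil)

	// CommitNewFile creates, stages and commits the file, returning the commit hash.
	sha := CommitNewFile(c, repo, "example.txt")
	c.Assert(sha.IsZero(), Equals, false)
}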
diff --git a/config/config.go b/config/config.go
index 2a196d0..82af12d 100644
--- a/config/config.go
+++ b/config/config.go
@@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"os"
"path/filepath"
"sort"
@@ -15,7 +14,6 @@ import (
"github.com/go-git/go-billy/v5/osfs"
"github.com/go-git/go-git/v5/internal/url"
format "github.com/go-git/go-git/v5/plumbing/format/config"
- "github.com/mitchellh/go-homedir"
)
const (
@@ -60,6 +58,8 @@ type Config struct {
// CommentChar is the character indicating the start of a
// comment for commands like commit and tag
CommentChar string
+ // RepositoryFormatVersion identifies the repository format and layout version.
+ RepositoryFormatVersion format.RepositoryFormatVersion
}
User struct {
@@ -97,6 +97,17 @@ type Config struct {
DefaultBranch string
}
+ Extensions struct {
+ // ObjectFormat specifies the hash algorithm to use. The
+ // acceptable values are sha1 and sha256. If not specified,
+ // sha1 is assumed. It is an error to specify this key unless
+ // core.repositoryFormatVersion is 1.
+ //
+ // This setting must not be changed after repository initialization
+ // (e.g. clone or init).
+ ObjectFormat format.ObjectFormat
+ }
+
// Remotes list of repository remotes, the key of the map is the name
// of the remote, should equal to RemoteConfig.Name.
Remotes map[string]*RemoteConfig
@@ -132,7 +143,7 @@ func NewConfig() *Config {
// ReadConfig reads a config file from a io.Reader.
func ReadConfig(r io.Reader) (*Config, error) {
- b, err := ioutil.ReadAll(r)
+ b, err := io.ReadAll(r)
if err != nil {
return nil, err
}
@@ -185,7 +196,7 @@ func Paths(scope Scope) ([]string, error) {
files = append(files, filepath.Join(xdg, "git/config"))
}
- home, err := homedir.Dir()
+ home, err := os.UserHomeDir()
if err != nil {
return nil, err
}
@@ -227,28 +238,32 @@ func (c *Config) Validate() error {
}
const (
- remoteSection = "remote"
- submoduleSection = "submodule"
- branchSection = "branch"
- coreSection = "core"
- packSection = "pack"
- userSection = "user"
- authorSection = "author"
- committerSection = "committer"
- initSection = "init"
- urlSection = "url"
- fetchKey = "fetch"
- urlKey = "url"
- bareKey = "bare"
- worktreeKey = "worktree"
- commentCharKey = "commentChar"
- windowKey = "window"
- mergeKey = "merge"
- rebaseKey = "rebase"
- nameKey = "name"
- emailKey = "email"
- descriptionKey = "description"
- defaultBranchKey = "defaultBranch"
+ remoteSection = "remote"
+ submoduleSection = "submodule"
+ branchSection = "branch"
+ coreSection = "core"
+ packSection = "pack"
+ userSection = "user"
+ authorSection = "author"
+ committerSection = "committer"
+ initSection = "init"
+ urlSection = "url"
+ extensionsSection = "extensions"
+ fetchKey = "fetch"
+ urlKey = "url"
+ bareKey = "bare"
+ worktreeKey = "worktree"
+ commentCharKey = "commentChar"
+ windowKey = "window"
+ mergeKey = "merge"
+ rebaseKey = "rebase"
+ nameKey = "name"
+ emailKey = "email"
+ descriptionKey = "description"
+ defaultBranchKey = "defaultBranch"
+ repositoryFormatVersionKey = "repositoryformatversion"
+ objectFormat = "objectformat"
+ mirrorKey = "mirror"
// DefaultPackWindow holds the number of previous objects used to
// generate deltas. The value 10 is the same used by git command.
@@ -392,6 +407,7 @@ func (c *Config) unmarshalInit() {
// Marshal returns Config encoded as a git-config file.
func (c *Config) Marshal() ([]byte, error) {
c.marshalCore()
+ c.marshalExtensions()
c.marshalUser()
c.marshalPack()
c.marshalRemotes()
@@ -411,12 +427,24 @@ func (c *Config) Marshal() ([]byte, error) {
func (c *Config) marshalCore() {
s := c.Raw.Section(coreSection)
s.SetOption(bareKey, fmt.Sprintf("%t", c.Core.IsBare))
+ if string(c.Core.RepositoryFormatVersion) != "" {
+ s.SetOption(repositoryFormatVersionKey, string(c.Core.RepositoryFormatVersion))
+ }
if c.Core.Worktree != "" {
s.SetOption(worktreeKey, c.Core.Worktree)
}
}
+func (c *Config) marshalExtensions() {
+ // Extensions are only supported on Version 1, therefore
+ // ignore them otherwise.
+ if c.Core.RepositoryFormatVersion == format.Version_1 {
+ s := c.Raw.Section(extensionsSection)
+ s.SetOption(objectFormat, string(c.Extensions.ObjectFormat))
+ }
+}
+
func (c *Config) marshalUser() {
s := c.Raw.Section(userSection)
if c.User.Name != "" {
@@ -550,6 +578,8 @@ type RemoteConfig struct {
// URLs the URLs of a remote repository. It must be non-empty. Fetch will
// always use the first URL, while push will use all of them.
URLs []string
+ // Mirror indicates that the repository is a mirror of remote.
+ Mirror bool
// insteadOfRulesApplied have urls been modified
insteadOfRulesApplied bool
@@ -603,6 +633,7 @@ func (c *RemoteConfig) unmarshal(s *format.Subsection) error {
c.Name = c.raw.Name
c.URLs = append([]string(nil), c.raw.Options.GetAll(urlKey)...)
c.Fetch = fetch
+ c.Mirror = c.raw.Options.Get(mirrorKey) == "true"
return nil
}
@@ -635,6 +666,10 @@ func (c *RemoteConfig) marshal() *format.Subsection {
c.raw.SetOption(fetchKey, values...)
}
+ if c.Mirror {
+ c.raw.SetOption(mirrorKey, strconv.FormatBool(c.Mirror))
+ }
+
return c.raw
}
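Taken together, the new config surface can be sketched as follows (not part of the patch; the "sha256" literal for ObjectFormat is an assumption based on the field's documentation, and the URL is a placeholder):

package main

import (
	"fmt"
	"log"

	"github.com/go-git/go-git/v5/config"
	format "github.com/go-git/go-git/v5/plumbing/format/config"
)

func main() {
	cfg := config.NewConfig()

	// Extensions are only marshalled when core.repositoryformatversion is 1.
	cfg.Core.RepositoryFormatVersion = format.Version_1
	cfg.Extensions.ObjectFormat = format.ObjectFormat("sha256") // assumed literal value

	// Mirror remotes are now round-tripped as "mirror = true".
	cfg.Remotes["origin"] = &config.RemoteConfig{
		Name:   "origin",
		URLs:   []string{"https://example.com/repo.git"}, // placeholder URL
		Mirror: true,
	}

	out, err := cfg.Marshal()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}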
diff --git a/config/refspec.go b/config/refspec.go
index 4bfaa37..e2cf8c9 100644
--- a/config/refspec.go
+++ b/config/refspec.go
@@ -64,7 +64,7 @@ func (s RefSpec) IsExactSHA1() bool {
return plumbing.IsHash(s.Src())
}
-// Src return the src side.
+// Src returns the src side.
func (s RefSpec) Src() string {
spec := string(s)
diff --git a/example_test.go b/example_test.go
index bba2961..27ea4a2 100644
--- a/example_test.go
+++ b/example_test.go
@@ -3,7 +3,6 @@ package git_test
import (
"fmt"
"io"
- "io/ioutil"
"log"
"os"
"path/filepath"
@@ -44,7 +43,7 @@ func ExampleClone() {
func ExamplePlainClone() {
// Tempdir to clone the repository
- dir, err := ioutil.TempDir("", "clone-example")
+ dir, err := os.MkdirTemp("", "clone-example")
if err != nil {
log.Fatal(err)
}
@@ -72,7 +71,7 @@ func ExamplePlainClone() {
func ExamplePlainClone_usernamePassword() {
// Tempdir to clone the repository
- dir, err := ioutil.TempDir("", "clone-example")
+ dir, err := os.MkdirTemp("", "clone-example")
if err != nil {
log.Fatal(err)
}
@@ -95,7 +94,7 @@ func ExamplePlainClone_usernamePassword() {
func ExamplePlainClone_accessToken() {
// Tempdir to clone the repository
- dir, err := ioutil.TempDir("", "clone-example")
+ dir, err := os.MkdirTemp("", "clone-example")
if err != nil {
log.Fatal(err)
}
diff --git a/go.mod b/go.mod
index ccb8fac..ea47145 100644
--- a/go.mod
+++ b/go.mod
@@ -1,30 +1,43 @@
module github.com/go-git/go-git/v5
+// go-git supports the last 3 stable Go versions.
+go 1.18
+
require (
- github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7
- github.com/acomagu/bufpipe v1.0.3
- github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 // indirect
+ dario.cat/mergo v1.0.0
+ github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95
+ github.com/acomagu/bufpipe v1.0.4
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5
- github.com/emirpasic/gods v1.12.0
- github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 // indirect
- github.com/gliderlabs/ssh v0.2.2
- github.com/go-git/gcfg v1.5.0
- github.com/go-git/go-billy/v5 v5.3.1
- github.com/go-git/go-git-fixtures/v4 v4.2.1
- github.com/google/go-cmp v0.3.0
- github.com/imdario/mergo v0.3.12
+ github.com/elazarl/goproxy v0.0.0-20221015165544-a0805db90819
+ github.com/emirpasic/gods v1.18.1
+ github.com/gliderlabs/ssh v0.3.5
+ github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376
+ github.com/go-git/go-billy/v5 v5.4.1
+ github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20230305113008-0c11038e723f
+ github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
+ github.com/google/go-cmp v0.5.9
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99
github.com/jessevdk/go-flags v1.5.0
- github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351
- github.com/mitchellh/go-homedir v1.1.0
+ github.com/kevinburke/ssh_config v1.2.0
+ github.com/pjbgf/sha1cd v0.3.0
github.com/sergi/go-diff v1.1.0
- github.com/xanzy/ssh-agent v0.3.1
- golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97
- golang.org/x/net v0.0.0-20210326060303-6b1517762897
- golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c
- golang.org/x/text v0.3.3
+ github.com/skeema/knownhosts v1.2.0
+ github.com/xanzy/ssh-agent v0.3.3
+ golang.org/x/crypto v0.11.0
+ golang.org/x/net v0.12.0
+ golang.org/x/sys v0.10.0
+ golang.org/x/text v0.11.0
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c
- gopkg.in/warnings.v0 v0.1.2 // indirect
)
-go 1.13
+require (
+ github.com/Microsoft/go-winio v0.6.1 // indirect
+ github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
+ github.com/cloudflare/circl v1.3.3 // indirect
+ github.com/kr/pretty v0.3.1 // indirect
+ github.com/kr/text v0.2.0 // indirect
+ github.com/rogpeppe/go-internal v1.9.0 // indirect
+ golang.org/x/mod v0.8.0 // indirect
+ golang.org/x/tools v0.6.0 // indirect
+ gopkg.in/warnings.v0 v0.1.2 // indirect
+)
diff --git a/go.sum b/go.sum
index ab212c5..375a22a 100644
--- a/go.sum
+++ b/go.sum
@@ -1,86 +1,148 @@
-github.com/Microsoft/go-winio v0.5.0 h1:Elr9Wn+sGKPlkaBvwu4mTrxtmOp3F3yV9qhaHbXGjwU=
-github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84=
-github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 h1:YoJbenK9C67SkzkDfmQuVln04ygHj3vjZfd9FL+GmQQ=
-github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo=
-github.com/acomagu/bufpipe v1.0.3 h1:fxAGrHZTgQ9w5QqVItgzwj235/uYZYgbXitB+dLupOk=
-github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4=
-github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
-github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
+dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
+dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
+github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
+github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
+github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
+github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 h1:KLq8BE0KwCL+mmXnjLWEAOYO+2l2AE4YMmqG1ZpZHBs=
+github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0=
+github.com/acomagu/bufpipe v1.0.4 h1:e3H4WUzM3npvo5uv95QuJM3cQspFNtFBzvJ2oNjKIDQ=
+github.com/acomagu/bufpipe v1.0.4/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4=
+github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
+github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
+github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
+github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs=
+github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
-github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
-github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
-github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
-github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=
-github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
-github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
-github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
-github.com/go-git/go-billy/v5 v5.2.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
-github.com/go-git/go-billy/v5 v5.3.1 h1:CPiOUAzKtMRvolEKw+bG1PLRpT7D3LIs3/3ey4Aiu34=
+github.com/elazarl/goproxy v0.0.0-20221015165544-a0805db90819 h1:RIB4cRk+lBqKK3Oy0r2gRX4ui7tuhiZq2SuTtTCi0/0=
+github.com/elazarl/goproxy v0.0.0-20221015165544-a0805db90819/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
+github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2 h1:dWB6v3RcOy03t/bUadywsbyrQwCqZeNIEX6M1OtSZOM=
+github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8=
+github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
+github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
+github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY=
+github.com/gliderlabs/ssh v0.3.5/go.mod h1:8XB4KraRrX39qHhT6yxPsHedjA08I/uBVwj4xC+/+z4=
+github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
+github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
github.com/go-git/go-billy/v5 v5.3.1/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
-github.com/go-git/go-git-fixtures/v4 v4.2.1 h1:n9gGL1Ct/yIw+nfsfr8s4+sbhT+Ncu2SubfXjIWgci8=
-github.com/go-git/go-git-fixtures/v4 v4.2.1/go.mod h1:K8zd3kDUAykwTdDCr+I0per6Y6vMiRR/nnVTBtavnB0=
-github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
-github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
-github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=
-github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
+github.com/go-git/go-billy/v5 v5.4.1 h1:Uwp5tDRkPr+l/TnbHOQzp+tmJfLceOlbVucgpTz8ix4=
+github.com/go-git/go-billy/v5 v5.4.1/go.mod h1:vjbugF6Fz7JIflbVpl1hJsGjSHNltrSw45YK/ukIvQg=
+github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20230305113008-0c11038e723f h1:Pz0DHeFij3XFhoBRGUDPzSJ+w2UcK5/0JvF8DRI58r8=
+github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20230305113008-0c11038e723f/go.mod h1:8LHG1a3SRW71ettAD/jW13h8c6AqjVSeL11RAdgaqpo=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
-github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=
-github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
+github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
+github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/matryer/is v1.2.0 h1:92UTHpy8CDwaJ08GqLDzhhuixiBUUD1p3AU6PHddz4A=
github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA=
-github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
-github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
+github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4=
+github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rogpeppe/go-charset v0.0.0-20180617210344-2471d30d28b4/go.mod h1:qgYeAmZ5ZIpBWTGllZSQnw97Dj+woV0toclVaRGI8pc=
+github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
+github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
+github.com/skeema/knownhosts v1.2.0 h1:h9r9cf0+u7wSE+M183ZtMGgOJKiL96brpaz5ekfJCpM=
+github.com/skeema/knownhosts v1.2.0/go.mod h1:g4fPeYpque7P0xefxtGzV81ihjC8sX2IqpAoNkjxbMo=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/xanzy/ssh-agent v0.3.1 h1:AmzO1SSWxw73zxFZPRwaMN1MohDw8UyHnmuxyceTEGo=
-github.com/xanzy/ssh-agent v0.3.1/go.mod h1:QIE4lCeL7nkC25x+yA3LBIYfwCc1TFziCtG7cBAac6w=
-golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
-golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 h1:/UOmuWzQfxxo9UtlXMwuQU8CMgg1eZXqTRwkSQJWKOI=
-golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
+github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
+golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
+golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
+golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
-golang.org/x/net v0.0.0-20210326060303-6b1517762897 h1:KrsHThm5nFk34YtATK1LsThyGhGbGe1olrte/HInHvs=
-golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k=
+golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
+golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
+golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
+golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
+golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I=
-golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220825204002-c680a09ffe64/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
+golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
-golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
+golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
+golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
+golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -90,7 +152,6 @@ gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
-gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/internal/path_util/path_util.go b/internal/path_util/path_util.go
new file mode 100644
index 0000000..48e4a3d
--- /dev/null
+++ b/internal/path_util/path_util.go
@@ -0,0 +1,29 @@
+package path_util
+
+import (
+ "os"
+ "os/user"
+ "strings"
+)
+
+func ReplaceTildeWithHome(path string) (string, error) {
+ if strings.HasPrefix(path, "~") {
+ firstSlash := strings.Index(path, "/")
+ if firstSlash == 1 {
+ home, err := os.UserHomeDir()
+ if err != nil {
+ return path, err
+ }
+ return strings.Replace(path, "~", home, 1), nil
+ } else if firstSlash > 1 {
+ username := path[1:firstSlash]
+ userAccount, err := user.Lookup(username)
+ if err != nil {
+ return path, err
+ }
+ return strings.Replace(path, path[:firstSlash], userAccount.HomeDir, 1), nil
+ }
+ }
+
+ return path, nil
+}
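For illustration only (internal packages are not importable outside go-git itself), the new helper behaves roughly like this:

package main

import (
	"fmt"
	"log"

	"github.com/go-git/go-git/v5/internal/path_util"
)

func main() {
	// "~/projects/repo" expands to the current user's home directory;
	// "~alice/repo" would be resolved via user.Lookup("alice").
	expanded, err := path_util.ReplaceTildeWithHome("~/projects/repo")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(expanded)
}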
diff --git a/internal/revision/parser.go b/internal/revision/parser.go
index 8facf17..8a2a719 100644
--- a/internal/revision/parser.go
+++ b/internal/revision/parser.go
@@ -322,6 +322,8 @@ func (p *Parser) parseAt() (Revisioner, error) {
}
return AtDate{t}, nil
+ case tok == eof:
+ return nil, &ErrInvalidRevision{s: `missing "}" in @{<data>} structure`}
default:
date += lit
}
@@ -424,6 +426,8 @@ func (p *Parser) parseCaretBraces() (Revisioner, error) {
p.unscan()
case tok != slash && start:
return nil, &ErrInvalidRevision{fmt.Sprintf(`"%s" is not a valid revision suffix brace component`, lit)}
+ case tok == eof:
+ return nil, &ErrInvalidRevision{s: `missing "}" in ^{<data>} structure`}
case tok != cbrace:
p.unscan()
re += lit
diff --git a/internal/revision/parser_test.go b/internal/revision/parser_test.go
index 98403cc..3a77b2f 100644
--- a/internal/revision/parser_test.go
+++ b/internal/revision/parser_test.go
@@ -183,7 +183,7 @@ func (s *ParserSuite) TestParseWithValidExpression(c *C) {
}
}
-func (s *ParserSuite) TestParseWithUnValidExpression(c *C) {
+func (s *ParserSuite) TestParseWithInvalidExpression(c *C) {
datas := map[string]error{
"..": &ErrInvalidRevision{`must not start with "."`},
"master^1master": &ErrInvalidRevision{`reference must be defined once at the beginning`},
@@ -198,6 +198,9 @@ func (s *ParserSuite) TestParseWithUnValidExpression(c *C) {
"~1": &ErrInvalidRevision{`"~" or "^" statement must have a reference defined at the beginning`},
"master:/test": &ErrInvalidRevision{`":" statement is not valid, could be : :/<regexp>`},
"master:0:README": &ErrInvalidRevision{`":" statement is not valid, could be : :<n>:<path>`},
+ "^{/": &ErrInvalidRevision{`missing "}" in ^{<data>} structure`},
+ "~@{": &ErrInvalidRevision{`missing "}" in @{<data>} structure`},
+ "@@{{0": &ErrInvalidRevision{`missing "}" in @{<data>} structure`},
}
for s, e := range datas {
@@ -230,7 +233,7 @@ func (s *ParserSuite) TestParseAtWithValidExpression(c *C) {
}
}
-func (s *ParserSuite) TestParseAtWithUnValidExpression(c *C) {
+func (s *ParserSuite) TestParseAtWithInvalidExpression(c *C) {
datas := map[string]error{
"{test}": &ErrInvalidRevision{`wrong date "test" must fit ISO-8601 format : 2006-01-02T15:04:05Z`},
"{-1": &ErrInvalidRevision{`missing "}" in @{-n} structure`},
diff --git a/internal/url/url.go b/internal/url/url.go
index 14cf133..2662448 100644
--- a/internal/url/url.go
+++ b/internal/url/url.go
@@ -5,8 +5,10 @@ import (
)
var (
- isSchemeRegExp = regexp.MustCompile(`^[^:]+://`)
- scpLikeUrlRegExp = regexp.MustCompile(`^(?:(?P<user>[^@]+)@)?(?P<host>[^:\s]+):(?:(?P<port>[0-9]{1,5})(?:\/|:))?(?P<path>[^\\].*\/[^\\].*)$`)
+ isSchemeRegExp = regexp.MustCompile(`^[^:]+://`)
+
+ // Ref: https://github.com/git/git/blob/master/Documentation/urls.txt#L37
+ scpLikeUrlRegExp = regexp.MustCompile(`^(?:(?P<user>[^@]+)@)?(?P<host>[^:\s]+):(?:(?P<port>[0-9]{1,5}):)?(?P<path>[^\\].*)$`)
)
// MatchesScheme returns true if the given string matches a URL-like
diff --git a/internal/url/url_test.go b/internal/url/url_test.go
index d168db6..29c3f3e 100755
--- a/internal/url/url_test.go
+++ b/internal/url/url_test.go
@@ -13,11 +13,27 @@ type URLSuite struct{}
var _ = Suite(&URLSuite{})
func (s *URLSuite) TestMatchesScpLike(c *C) {
+ // See https://github.com/git/git/blob/master/Documentation/urls.txt#L37
examples := []string{
+ // Most-extended case
"git@github.com:james/bond",
- "git@github.com:007/bond",
+ // Most-extended case with port
"git@github.com:22:james/bond",
+ // Most-extended case with numeric path
+ "git@github.com:007/bond",
+ // Most-extended case with port and numeric path
"git@github.com:22:007/bond",
+ // Single repo path
+ "git@github.com:bond",
+ // Single repo path with port
+ "git@github.com:22:bond",
+ // Single repo path with port and numeric repo
+ "git@github.com:22:007",
+ // Repo path ending with .git and starting with _
+ "git@github.com:22:_007.git",
+ "git@github.com:_007.git",
+ "git@github.com:_james.git",
+ "git@github.com:_james/bond.git",
}
for _, url := range examples {
@@ -26,35 +42,68 @@ func (s *URLSuite) TestMatchesScpLike(c *C) {
}
func (s *URLSuite) TestFindScpLikeComponents(c *C) {
- url := "git@github.com:james/bond"
- user, host, port, path := FindScpLikeComponents(url)
-
- c.Check(user, Equals, "git")
- c.Check(host, Equals, "github.com")
- c.Check(port, Equals, "")
- c.Check(path, Equals, "james/bond")
-
- url = "git@github.com:007/bond"
- user, host, port, path = FindScpLikeComponents(url)
-
- c.Check(user, Equals, "git")
- c.Check(host, Equals, "github.com")
- c.Check(port, Equals, "")
- c.Check(path, Equals, "007/bond")
-
- url = "git@github.com:22:james/bond"
- user, host, port, path = FindScpLikeComponents(url)
+ testCases := []struct {
+ url, user, host, port, path string
+ }{
+ {
+ // Most-extended case
+ url: "git@github.com:james/bond", user: "git", host: "github.com", port: "", path: "james/bond",
+ },
+ {
+ // Most-extended case with port
+ url: "git@github.com:22:james/bond", user: "git", host: "github.com", port: "22", path: "james/bond",
+ },
+ {
+ // Most-extended case with numeric path
+ url: "git@github.com:007/bond", user: "git", host: "github.com", port: "", path: "007/bond",
+ },
+ {
+ // Most-extended case with port and numeric path
+ url: "git@github.com:22:007/bond", user: "git", host: "github.com", port: "22", path: "007/bond",
+ },
+ {
+ // Single repo path
+ url: "git@github.com:bond", user: "git", host: "github.com", port: "", path: "bond",
+ },
+ {
+ // Single repo path with port
+ url: "git@github.com:22:bond", user: "git", host: "github.com", port: "22", path: "bond",
+ },
+ {
+ // Single repo path with port and numeric path
+ url: "git@github.com:22:007", user: "git", host: "github.com", port: "22", path: "007",
+ },
+ {
+ // Repo path ending with .git and starting with _
+ url: "git@github.com:22:_007.git", user: "git", host: "github.com", port: "22", path: "_007.git",
+ },
+ {
+ // Repo path ending with .git and starting with _
+ url: "git@github.com:_007.git", user: "git", host: "github.com", port: "", path: "_007.git",
+ },
+ {
+ // Repo path ending with .git and starting with _
+ url: "git@github.com:_james.git", user: "git", host: "github.com", port: "", path: "_james.git",
+ },
+ {
+ // Repo path ending with .git and starting with _
+ url: "git@github.com:_james/bond.git", user: "git", host: "github.com", port: "", path: "_james/bond.git",
+ },
+ }
- c.Check(user, Equals, "git")
- c.Check(host, Equals, "github.com")
- c.Check(port, Equals, "22")
- c.Check(path, Equals, "james/bond")
+ for _, tc := range testCases {
+ user, host, port, path := FindScpLikeComponents(tc.url)
- url = "git@github.com:22:007/bond"
- user, host, port, path = FindScpLikeComponents(url)
+ logf := func(ok bool) {
+ if ok {
+ return
+ }
+ c.Logf("%q check failed", tc.url)
+ }
- c.Check(user, Equals, "git")
- c.Check(host, Equals, "github.com")
- c.Check(port, Equals, "22")
- c.Check(path, Equals, "007/bond")
+ logf(c.Check(user, Equals, tc.user))
+ logf(c.Check(host, Equals, tc.host))
+ logf(c.Check(port, Equals, tc.port))
+ logf(c.Check(path, Equals, tc.path))
+ }
}
diff --git a/options.go b/options.go
index a2d62bd..757bdc8 100644
--- a/options.go
+++ b/options.go
@@ -10,6 +10,7 @@ import (
"github.com/ProtonMail/go-crypto/openpgp"
"github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing"
+ formatcfg "github.com/go-git/go-git/v5/plumbing/format/config"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/go-git/go-git/v5/plumbing/protocol/packp/sideband"
"github.com/go-git/go-git/v5/plumbing/transport"
@@ -45,6 +46,14 @@ type CloneOptions struct {
ReferenceName plumbing.ReferenceName
// Fetch only ReferenceName if true.
SingleBranch bool
+ // Mirror clones the repository as a mirror.
+ //
+ // Compared to a bare clone, mirror not only maps local branches of the
+ // source to local branches of the target, it maps all refs (including
+ // remote-tracking branches, notes etc.) and sets up a refspec configuration
+ // such that all these refs are overwritten by a git remote update in the
+ // target repository.
+ Mirror bool
// No checkout of HEAD after clone if true.
NoCheckout bool
// Limit fetching to the specified number of commits.
@@ -53,6 +62,9 @@ type CloneOptions struct {
// within, using their default settings. This option is ignored if the
// cloned repository does not have a worktree.
RecurseSubmodules SubmoduleRescursivity
+ // ShallowSubmodules limits cloning submodules to a depth of 1.
+ // It matches the git command --shallow-submodules.
+ ShallowSubmodules bool
// Progress is where the human readable information sent by the server is
// stored, if nil nothing is stored and the capability (if supported)
// no-progress, is sent to the server to avoid send this information.
@@ -64,6 +76,8 @@ type CloneOptions struct {
InsecureSkipTLS bool
// CABundle specify additional ca bundle with system cert pool
CABundle []byte
+ // ProxyOptions provides info required for connecting to a proxy.
+ ProxyOptions transport.ProxyOptions
}
// Validate validates the fields and sets the default values.
@@ -115,6 +129,8 @@ type PullOptions struct {
InsecureSkipTLS bool
// CABundle specify additional ca bundle with system cert pool
CABundle []byte
+ // ProxyOptions provides info required for connecting to a proxy.
+ ProxyOptions transport.ProxyOptions
}
// Validate validates the fields and sets the default values.
@@ -171,6 +187,8 @@ type FetchOptions struct {
InsecureSkipTLS bool
// CABundle specify additional ca bundle with system cert pool
CABundle []byte
+ // ProxyOptions provides info required for connecting to a proxy.
+ ProxyOptions transport.ProxyOptions
}
// Validate validates the fields and sets the default values.
@@ -218,7 +236,7 @@ type PushOptions struct {
// Force allows the push to update a remote branch even when the local
// branch does not descend from it.
Force bool
- // InsecureSkipTLS skips ssl verify if protocal is https
+ // InsecureSkipTLS skips ssl verify if protocol is https
InsecureSkipTLS bool
// CABundle specify additional ca bundle with system cert pool
CABundle []byte
@@ -234,6 +252,8 @@ type PushOptions struct {
Options map[string]string
// Atomic sets option to be an atomic push
Atomic bool
+ // ProxyOptions provides info required for connecting to a proxy.
+ ProxyOptions transport.ProxyOptions
}
// ForceWithLease sets fields on the lease
@@ -283,6 +303,9 @@ type SubmoduleUpdateOptions struct {
RecurseSubmodules SubmoduleRescursivity
// Auth credentials, if required, to use with the remote repository.
Auth transport.AuthMethod
+ // Depth limit fetching to the specified number of commits from the tip of
+ // each remote branch history.
+ Depth int
}
var (
@@ -402,7 +425,7 @@ type LogOptions struct {
// Show only those commits in which the specified file was inserted/updated.
// It is equivalent to running `git log -- <file-name>`.
- // this field is kept for compatility, it can be replaced with PathFilter
+ // this field is kept for compatibility, it can be replaced with PathFilter
FileName *string
// Filter commits based on the path of files that are updated
@@ -458,6 +481,10 @@ type CommitOptions struct {
// All automatically stage files that have been modified and deleted, but
// new files you have not told Git about are not affected.
All bool
+ // AllowEmptyCommits enables empty commits to be created. An empty commit
+ // is one where no changes to the tree were made, but a new commit message
+ // is provided. The default is false, which results in ErrEmptyCommit.
+ AllowEmptyCommits bool
// Author is the author's signature of the commit. If Author is empty the
// Name and Email is read from the config, and time.Now it's used as When.
Author *object.Signature
@@ -618,12 +645,35 @@ func (o *CreateTagOptions) loadConfigTagger(r *Repository) error {
type ListOptions struct {
// Auth credentials, if required, to use with the remote repository.
Auth transport.AuthMethod
- // InsecureSkipTLS skips ssl verify if protocal is https
+ // InsecureSkipTLS skips ssl verify if protocol is https
InsecureSkipTLS bool
// CABundle specify additional ca bundle with system cert pool
CABundle []byte
+ // PeelingOption defines how peeled objects are handled during a
+ // remote list.
+ PeelingOption PeelingOption
+ // ProxyOptions provides info required for connecting to a proxy.
+ ProxyOptions transport.ProxyOptions
+ // Timeout specifies the timeout in seconds for list operations
+ Timeout int
}
+// PeelingOption represents the different ways to handle peeled references.
+//
+// Peeled references represent the underlying object of an annotated
+// (or signed) tag. Refer to upstream documentation for more info:
+// https://github.com/git/git/blob/master/Documentation/technical/reftable.txt
+type PeelingOption uint8
+
+const (
+ // IgnorePeeled ignores all peeled reference names. This is the default behavior.
+ IgnorePeeled PeelingOption = 0
+ // OnlyPeeled returns only peeled reference names.
+ OnlyPeeled PeelingOption = 1
+ // AppendPeeled appends peeled reference names to the reference list.
+ AppendPeeled PeelingOption = 2
+)
+
// CleanOptions describes how a clean should be performed.
type CleanOptions struct {
Dir bool
@@ -648,7 +698,13 @@ var (
)
// Validate validates the fields and sets the default values.
+//
+// TODO: deprecate in favor of Validate(r *Repository) in v6.
func (o *GrepOptions) Validate(w *Worktree) error {
+ return o.validate(w.r)
+}
+
+func (o *GrepOptions) validate(r *Repository) error {
if !o.CommitHash.IsZero() && o.ReferenceName != "" {
return ErrHashOrReference
}
@@ -656,7 +712,7 @@ func (o *GrepOptions) Validate(w *Worktree) error {
// If none of CommitHash and ReferenceName are provided, set commit hash of
// the repository's head.
if o.CommitHash.IsZero() && o.ReferenceName == "" {
- ref, err := w.r.Head()
+ ref, err := r.Head()
if err != nil {
return err
}
@@ -679,3 +735,10 @@ type PlainOpenOptions struct {
// Validate validates the fields and sets the default values.
func (o *PlainOpenOptions) Validate() error { return nil }
+
+type PlainInitOptions struct {
+ ObjectFormat formatcfg.ObjectFormat
+}
+
+// Validate validates the fields and sets the default values.
+func (o *PlainInitOptions) Validate() error { return nil }
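
The new option fields above plug into the existing porcelain API. The following is a rough usage sketch, not the canonical workflow: the repository path and remote name are placeholders, it assumes user.name/user.email are already configured, and error handling is kept minimal.

package main

import (
	"fmt"

	git "github.com/go-git/go-git/v5"
)

func main() {
	repo, err := git.PlainOpen("/path/to/repo") // placeholder path
	if err != nil {
		panic(err)
	}

	// Create a commit even though the tree is unchanged.
	wt, err := repo.Worktree()
	if err != nil {
		panic(err)
	}
	commit, err := wt.Commit("empty release marker", &git.CommitOptions{
		AllowEmptyCommits: true,
	})
	if err != nil {
		panic(err)
	}
	fmt.Println("created", commit)

	// List remote references, appending the peeled form of annotated tags.
	remote, err := repo.Remote("origin")
	if err != nil {
		panic(err)
	}
	refs, err := remote.List(&git.ListOptions{PeelingOption: git.AppendPeeled})
	if err != nil {
		panic(err)
	}
	fmt.Println("listed", len(refs), "references")
}
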
diff --git a/plumbing/format/commitgraph/encoder.go b/plumbing/format/commitgraph/encoder.go
index d34076f..f61025b 100644
--- a/plumbing/format/commitgraph/encoder.go
+++ b/plumbing/format/commitgraph/encoder.go
@@ -1,11 +1,10 @@
package commitgraph
import (
- "crypto/sha1"
- "hash"
"io"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/utils/binary"
)
@@ -17,7 +16,7 @@ type Encoder struct {
// NewEncoder returns a new stream encoder that writes to w.
func NewEncoder(w io.Writer) *Encoder {
- h := sha1.New()
+ h := hash.New(hash.CryptoType)
mw := io.MultiWriter(w, h)
return &Encoder{mw, h}
}
@@ -31,7 +30,7 @@ func (e *Encoder) Encode(idx Index) error {
hashToIndex, fanout, extraEdgesCount := e.prepare(idx, hashes)
chunkSignatures := [][]byte{oidFanoutSignature, oidLookupSignature, commitDataSignature}
- chunkSizes := []uint64{4 * 256, uint64(len(hashes)) * 20, uint64(len(hashes)) * 36}
+ chunkSizes := []uint64{4 * 256, uint64(len(hashes)) * hash.Size, uint64(len(hashes)) * 36}
if extraEdgesCount > 0 {
chunkSignatures = append(chunkSignatures, extraEdgeListSignature)
chunkSizes = append(chunkSizes, uint64(extraEdgesCount)*4)
@@ -183,6 +182,6 @@ func (e *Encoder) encodeExtraEdges(extraEdges []uint32) (err error) {
}
func (e *Encoder) encodeChecksum() error {
- _, err := e.Write(e.hash.Sum(nil)[:20])
+ _, err := e.Write(e.hash.Sum(nil)[:hash.Size])
return err
}
diff --git a/plumbing/format/config/encoder.go b/plumbing/format/config/encoder.go
index 4eac896..de069ae 100644
--- a/plumbing/format/config/encoder.go
+++ b/plumbing/format/config/encoder.go
@@ -11,6 +11,10 @@ type Encoder struct {
w io.Writer
}
+var (
+ subsectionReplacer = strings.NewReplacer(`"`, `\"`, `\`, `\\`)
+ valueReplacer = strings.NewReplacer(`"`, `\"`, `\`, `\\`, "\n", `\n`, "\t", `\t`, "\b", `\b`)
+)
// NewEncoder returns a new encoder that writes to w.
func NewEncoder(w io.Writer) *Encoder {
return &Encoder{w}
@@ -48,8 +52,7 @@ func (e *Encoder) encodeSection(s *Section) error {
}
func (e *Encoder) encodeSubsection(sectionName string, s *Subsection) error {
- //TODO: escape
- if err := e.printf("[%s \"%s\"]\n", sectionName, s.Name); err != nil {
+ if err := e.printf("[%s \"%s\"]\n", sectionName, subsectionReplacer.Replace(s.Name)); err != nil {
return err
}
@@ -58,12 +61,14 @@ func (e *Encoder) encodeSubsection(sectionName string, s *Subsection) error {
func (e *Encoder) encodeOptions(opts Options) error {
for _, o := range opts {
- pattern := "\t%s = %s\n"
- if strings.Contains(o.Value, "\\") {
- pattern = "\t%s = %q\n"
+ var value string
+ if strings.ContainsAny(o.Value, "#;\"\t\n\\") || strings.HasPrefix(o.Value, " ") || strings.HasSuffix(o.Value, " ") {
+ value = `"`+valueReplacer.Replace(o.Value)+`"`
+ } else {
+ value = o.Value
}
- if err := e.printf(pattern, o.Key, o.Value); err != nil {
+ if err := e.printf("\t%s = %s\n", o.Key, value); err != nil {
return err
}
}
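
The encoder change above swaps the old %q-based quoting for git-style escaping via strings.Replacer. The snippet below reproduces that decision logic outside go-git; encodeValue is only an illustrative name, not part of the library.

package main

import (
	"fmt"
	"strings"
)

// Same escaping rules as the valueReplacer added above.
var valueReplacer = strings.NewReplacer(`"`, `\"`, `\`, `\\`, "\n", `\n`, "\t", `\t`, "\b", `\b`)

// encodeValue quotes a value only when it contains characters that would
// otherwise be misparsed (comments, quotes, escapes) or significant
// leading/trailing spaces.
func encodeValue(v string) string {
	if strings.ContainsAny(v, "#;\"\t\n\\") || strings.HasPrefix(v, " ") || strings.HasSuffix(v, " ") {
		return `"` + valueReplacer.Replace(v) + `"`
	}
	return v
}

func main() {
	fmt.Println(encodeValue("plain value"))    // plain value
	fmt.Println(encodeValue("has # hash"))     // "has # hash"
	fmt.Println(encodeValue("has \t tab"))     // "has \t tab"
	fmt.Println(encodeValue(" leading space")) // " leading space"
}
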
diff --git a/plumbing/format/config/fixtures_test.go b/plumbing/format/config/fixtures_test.go
index f3533df..2fa7840 100644
--- a/plumbing/format/config/fixtures_test.go
+++ b/plumbing/format/config/fixtures_test.go
@@ -43,6 +43,41 @@ var fixtures = []*Fixture{
Config: New().AddOption("core", "", "repositoryformatversion", "0"),
},
{
+ Raw: `[section]
+ option1 = "has # hash"
+ option2 = "has \" quote"
+ option3 = "has \\ backslash"
+ option4 = "has ; semicolon"
+ option5 = "has \n line-feed"
+ option6 = "has \t tab"
+ option7 = " has leading spaces"
+ option8 = "has trailing spaces "
+ option9 = has no special characters
+ option10 = has unusual ` + "\x01\x7f\xc8\x80 characters\n",
+ Text: `[section]
+ option1 = "has # hash"
+ option2 = "has \" quote"
+ option3 = "has \\ backslash"
+ option4 = "has ; semicolon"
+ option5 = "has \n line-feed"
+ option6 = "has \t tab"
+ option7 = " has leading spaces"
+ option8 = "has trailing spaces "
+ option9 = has no special characters
+ option10 = has unusual ` + "\x01\x7f\xc8\x80 characters\n",
+ Config: New().
+ AddOption("section", "", "option1", `has # hash`).
+ AddOption("section", "", "option2", `has " quote`).
+ AddOption("section", "", "option3", `has \ backslash`).
+ AddOption("section", "", "option4", `has ; semicolon`).
+ AddOption("section", "", "option5", "has \n line-feed").
+ AddOption("section", "", "option6", "has \t tab").
+ AddOption("section", "", "option7", ` has leading spaces`).
+ AddOption("section", "", "option8", `has trailing spaces `).
+ AddOption("section", "", "option9", `has no special characters`).
+ AddOption("section", "", "option10", "has unusual \x01\x7f\u0200 characters"),
+ },
+ {
Raw: `
[sect1]
opt1 = value1
diff --git a/plumbing/format/config/format.go b/plumbing/format/config/format.go
new file mode 100644
index 0000000..4873ea9
--- /dev/null
+++ b/plumbing/format/config/format.go
@@ -0,0 +1,53 @@
+package config
+
+// RepositoryFormatVersion represents the repository format version,
+// as defined at:
+//
+// https://git-scm.com/docs/repository-version
+type RepositoryFormatVersion string
+
+const (
+ // Version_0 is the format defined by the initial version of git,
+ // including but not limited to the format of the repository
+ // directory, the repository configuration file, and the object
+ // and ref storage.
+ //
+ // Specifying the complete behavior of git is beyond the scope
+ // of this document.
+ Version_0 = "0"
+
+ // Version_1 is identical to version 0, with the following exceptions:
+ //
+ // 1. When reading the core.repositoryformatversion variable, a git
+ // implementation which supports version 1 MUST also read any
+ // configuration keys found in the extensions section of the
+ // configuration file.
+ //
+ // 2. If a version-1 repository specifies any extensions.* keys that
+ // the running git has not implemented, the operation MUST NOT proceed.
+ // Similarly, if the value of any known key is not understood by the
+ // implementation, the operation MUST NOT proceed.
+ //
+ // Note that if no extensions are specified in the config file, then
+ // core.repositoryformatversion SHOULD be set to 0 (setting it to 1 provides
+ // no benefit, and makes the repository incompatible with older
+ // implementations of git).
+ Version_1 = "1"
+
+ // DefaultRepositoryFormatVersion holds the default repository format version.
+ DefaultRepositoryFormatVersion = Version_0
+)
+
+// ObjectFormat defines the object format.
+type ObjectFormat string
+
+const (
+ // SHA1 represents the object format used for SHA1.
+ SHA1 ObjectFormat = "sha1"
+
+ // SHA256 represents the object format used for SHA256.
+ SHA256 ObjectFormat = "sha256"
+
+ // DefaultObjectFormat holds the default object format.
+ DefaultObjectFormat = SHA1
+)
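
These constants pair with the repository config: per git's repository-version document, a non-default object format implies core.repositoryformatversion=1 plus an extensions entry. Below is a hedged sketch using this package directly; the extensions.objectformat key name comes from git's documentation, not from this diff.

package main

import (
	"os"

	format "github.com/go-git/go-git/v5/plumbing/format/config"
)

func main() {
	// A SHA256 repository needs format version 1 and an explicit extensions
	// entry; version 0 repositories stay on the default SHA1 object format.
	cfg := format.New().
		AddOption("core", "", "repositoryformatversion", format.Version_1).
		AddOption("extensions", "", "objectformat", string(format.SHA256))

	// Emit the resulting gitconfig-style text to stdout.
	if err := format.NewEncoder(os.Stdout).Encode(cfg); err != nil {
		panic(err)
	}
}
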
diff --git a/plumbing/format/config/section.go b/plumbing/format/config/section.go
index 07f72f3..4625ac5 100644
--- a/plumbing/format/config/section.go
+++ b/plumbing/format/config/section.go
@@ -103,7 +103,7 @@ func (s *Section) RemoveSubsection(name string) *Section {
return s
}
-// Option return the value for the specified key. Empty string is returned if
+// Option returns the value for the specified key. Empty string is returned if
// key does not exists.
func (s *Section) Option(key string) string {
return s.Options.Get(key)
diff --git a/plumbing/format/diff/patch.go b/plumbing/format/diff/patch.go
index 39a66a1..c7678b0 100644
--- a/plumbing/format/diff/patch.go
+++ b/plumbing/format/diff/patch.go
@@ -9,7 +9,7 @@ import (
type Operation int
const (
- // Equal item represents a equals diff.
+ // Equal item represents an equals diff.
Equal Operation = iota
// Add item represents an insert diff.
Add
@@ -26,15 +26,15 @@ type Patch interface {
Message() string
}
-// FilePatch represents the necessary steps to transform one file to another.
+// FilePatch represents the necessary steps to transform one file into another.
type FilePatch interface {
// IsBinary returns true if this patch is representing a binary file.
IsBinary() bool
- // Files returns the from and to Files, with all the necessary metadata to
+ // Files returns the from and to Files, with all the necessary metadata
// about them. If the patch creates a new file, "from" will be nil.
// If the patch deletes a file, "to" will be nil.
Files() (from, to File)
- // Chunks returns a slice of ordered changes to transform "from" File to
+ // Chunks returns a slice of ordered changes to transform "from" File into
// "to" File. If the file is a binary one, Chunks will be empty.
Chunks() []Chunk
}
@@ -49,7 +49,7 @@ type File interface {
Path() string
}
-// Chunk represents a portion of a file transformation to another.
+// Chunk represents a portion of a file transformation into another.
type Chunk interface {
// Content contains the portion of the file.
Content() string
diff --git a/plumbing/format/gitattributes/attributes.go b/plumbing/format/gitattributes/attributes.go
index 329e667..d36ec1b 100644
--- a/plumbing/format/gitattributes/attributes.go
+++ b/plumbing/format/gitattributes/attributes.go
@@ -3,7 +3,6 @@ package gitattributes
import (
"errors"
"io"
- "io/ioutil"
"strings"
)
@@ -89,7 +88,7 @@ func (a attribute) String() string {
// ReadAttributes reads patterns and attributes from the gitattributes format.
func ReadAttributes(r io.Reader, domain []string, allowMacro bool) (attributes []MatchAttribute, err error) {
- data, err := ioutil.ReadAll(r)
+ data, err := io.ReadAll(r)
if err != nil {
return nil, err
}
diff --git a/plumbing/format/gitattributes/pattern.go b/plumbing/format/gitattributes/pattern.go
index d961aba..f101f47 100644
--- a/plumbing/format/gitattributes/pattern.go
+++ b/plumbing/format/gitattributes/pattern.go
@@ -52,6 +52,11 @@ func (p *pattern) Match(path []string) bool {
var match, doublestar bool
var err error
for _, part := range path {
+ // path is deeper than pattern
+ if len(pattern) == 0 {
+ return false
+ }
+
// skip empty
if pattern[0] == "" {
pattern = pattern[1:]
diff --git a/plumbing/format/gitattributes/pattern_test.go b/plumbing/format/gitattributes/pattern_test.go
index f95be6e..981d56f 100644
--- a/plumbing/format/gitattributes/pattern_test.go
+++ b/plumbing/format/gitattributes/pattern_test.go
@@ -174,6 +174,12 @@ func (s *PatternSuite) TestGlobMatch_tailingAsterisks_single(c *C) {
c.Assert(r, Equals, true)
}
+func (s *PatternSuite) TestGlobMatch_tailingAsterisk_single(c *C) {
+ p := ParsePattern("/*lue/*", nil)
+ r := p.Match([]string{"value", "volcano", "tail"})
+ c.Assert(r, Equals, false)
+}
+
func (s *PatternSuite) TestGlobMatch_tailingAsterisks_exactMatch(c *C) {
p := ParsePattern("/*lue/vol?ano/**", nil)
r := p.Match([]string{"value", "volcano"})
diff --git a/plumbing/format/gitignore/dir.go b/plumbing/format/gitignore/dir.go
index 15bc9c7..d8fb30c 100644
--- a/plumbing/format/gitignore/dir.go
+++ b/plumbing/format/gitignore/dir.go
@@ -3,11 +3,12 @@ package gitignore
import (
"bufio"
"bytes"
- "io/ioutil"
+ "io"
"os"
"strings"
"github.com/go-git/go-billy/v5"
+ "github.com/go-git/go-git/v5/internal/path_util"
"github.com/go-git/go-git/v5/plumbing/format/config"
gioutil "github.com/go-git/go-git/v5/utils/ioutil"
)
@@ -25,6 +26,9 @@ const (
// readIgnoreFile reads a specific git ignore file.
func readIgnoreFile(fs billy.Filesystem, path []string, ignoreFile string) (ps []Pattern, err error) {
+
+ ignoreFile, _ = path_util.ReplaceTildeWithHome(ignoreFile)
+
f, err := fs.Open(fs.Join(append(path, ignoreFile)...))
if err == nil {
defer f.Close()
@@ -86,7 +90,7 @@ func loadPatterns(fs billy.Filesystem, path string) (ps []Pattern, err error) {
defer gioutil.CheckClose(f, &err)
- b, err := ioutil.ReadAll(f)
+ b, err := io.ReadAll(f)
if err != nil {
return
}
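
ReplaceTildeWithHome comes from the new internal/path_util package, whose implementation is not part of this excerpt. The stand-in below only illustrates the intended behavior for the plain "~/" prefix; per the tests further down, the real helper also handles "~user/" paths.

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// replaceTildeWithHome expands a leading "~/" to the current user's home
// directory and leaves every other path untouched.
func replaceTildeWithHome(path string) (string, error) {
	if strings.HasPrefix(path, "~/") {
		home, err := os.UserHomeDir()
		if err != nil {
			return path, err
		}
		return filepath.Join(home, path[2:]), nil
	}
	return path, nil
}

func main() {
	expanded, err := replaceTildeWithHome("~/.gitignore_global")
	if err != nil {
		panic(err)
	}
	fmt.Println(expanded) // e.g. /home/alice/.gitignore_global
}
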
diff --git a/plumbing/format/gitignore/dir_test.go b/plumbing/format/gitignore/dir_test.go
index facc36d..465c571 100644
--- a/plumbing/format/gitignore/dir_test.go
+++ b/plumbing/format/gitignore/dir_test.go
@@ -2,7 +2,9 @@ package gitignore
import (
"os"
+ "os/user"
"strconv"
+ "strings"
"github.com/go-git/go-billy/v5"
"github.com/go-git/go-billy/v5/memfs"
@@ -12,6 +14,8 @@ import (
type MatcherSuite struct {
GFS billy.Filesystem // git repository root
RFS billy.Filesystem // root that contains user home
+ RFSR billy.Filesystem // root that contains user home, but with relative ~/.gitignore_global
+ RFSU billy.Filesystem // root that contains user home, but with relative ~user/.gitignore_global
MCFS billy.Filesystem // root that contains user home, but missing ~/.gitconfig
MEFS billy.Filesystem // root that contains user home, but missing excludesfile entry
MIFS billy.Filesystem // root that contains user home, but missing .gitignore
@@ -63,6 +67,27 @@ func (s *MatcherSuite) SetUpTest(c *C) {
err = fs.MkdirAll("vendor/gopkg.in", os.ModePerm)
c.Assert(err, IsNil)
+ err = fs.MkdirAll("multiple/sub/ignores/first", os.ModePerm)
+ c.Assert(err, IsNil)
+ err = fs.MkdirAll("multiple/sub/ignores/second", os.ModePerm)
+ c.Assert(err, IsNil)
+ f, err = fs.Create("multiple/sub/ignores/first/.gitignore")
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte("ignore_dir\n"))
+ c.Assert(err, IsNil)
+ err = f.Close()
+ c.Assert(err, IsNil)
+ f, err = fs.Create("multiple/sub/ignores/second/.gitignore")
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte("ignore_dir\n"))
+ c.Assert(err, IsNil)
+ err = f.Close()
+ c.Assert(err, IsNil)
+ err = fs.MkdirAll("multiple/sub/ignores/first/ignore_dir", os.ModePerm)
+ c.Assert(err, IsNil)
+ err = fs.MkdirAll("multiple/sub/ignores/second/ignore_dir", os.ModePerm)
+ c.Assert(err, IsNil)
+
s.GFS = fs
// setup root that contains user home
@@ -95,6 +120,64 @@ func (s *MatcherSuite) SetUpTest(c *C) {
s.RFS = fs
+ // root that contains user home, but with relative ~/.gitignore_global
+ fs = memfs.New()
+ err = fs.MkdirAll(home, os.ModePerm)
+ c.Assert(err, IsNil)
+
+ f, err = fs.Create(fs.Join(home, gitconfigFile))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte("[core]\n"))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte(" excludesfile = ~/.gitignore_global" + "\n"))
+ c.Assert(err, IsNil)
+ err = f.Close()
+ c.Assert(err, IsNil)
+
+ f, err = fs.Create(fs.Join(home, ".gitignore_global"))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte("# IntelliJ\n"))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte(".idea/\n"))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte("*.iml\n"))
+ c.Assert(err, IsNil)
+ err = f.Close()
+ c.Assert(err, IsNil)
+
+ s.RFSR = fs
+
+ // root that contains user home, but with relative ~user/.gitignore_global
+ fs = memfs.New()
+ err = fs.MkdirAll(home, os.ModePerm)
+ c.Assert(err, IsNil)
+
+ f, err = fs.Create(fs.Join(home, gitconfigFile))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte("[core]\n"))
+ c.Assert(err, IsNil)
+ currentUser, err := user.Current()
+ c.Assert(err, IsNil)
+ // remove domain for windows
+ username := currentUser.Username[strings.Index(currentUser.Username, "\\")+1:]
+ _, err = f.Write([]byte(" excludesfile = ~" + username + "/.gitignore_global" + "\n"))
+ c.Assert(err, IsNil)
+ err = f.Close()
+ c.Assert(err, IsNil)
+
+ f, err = fs.Create(fs.Join(home, ".gitignore_global"))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte("# IntelliJ\n"))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte(".idea/\n"))
+ c.Assert(err, IsNil)
+ _, err = f.Write([]byte("*.iml\n"))
+ c.Assert(err, IsNil)
+ err = f.Close()
+ c.Assert(err, IsNil)
+
+ s.RFSU = fs
+
// root that contains user home, but missing ~/.gitconfig
fs = memfs.New()
err = fs.MkdirAll(home, os.ModePerm)
@@ -183,15 +266,39 @@ func (s *MatcherSuite) SetUpTest(c *C) {
}
func (s *MatcherSuite) TestDir_ReadPatterns(c *C) {
+ checkPatterns := func(ps []Pattern) {
+ c.Assert(ps, HasLen, 6)
+ m := NewMatcher(ps)
+
+ c.Assert(m.Match([]string{"exclude.crlf"}, true), Equals, true)
+ c.Assert(m.Match([]string{"ignore.crlf"}, true), Equals, true)
+ c.Assert(m.Match([]string{"vendor", "gopkg.in"}, true), Equals, true)
+ c.Assert(m.Match([]string{"vendor", "github.com"}, true), Equals, false)
+ c.Assert(m.Match([]string{"multiple", "sub", "ignores", "first", "ignore_dir"}, true), Equals, true)
+ c.Assert(m.Match([]string{"multiple", "sub", "ignores", "second", "ignore_dir"}, true), Equals, true)
+ }
+
ps, err := ReadPatterns(s.GFS, nil)
c.Assert(err, IsNil)
- c.Assert(ps, HasLen, 4)
+ checkPatterns(ps)
- m := NewMatcher(ps)
- c.Assert(m.Match([]string{"exclude.crlf"}, true), Equals, true)
- c.Assert(m.Match([]string{"ignore.crlf"}, true), Equals, true)
- c.Assert(m.Match([]string{"vendor", "gopkg.in"}, true), Equals, true)
- c.Assert(m.Match([]string{"vendor", "github.com"}, true), Equals, false)
+ // passing an empty slice with capacity to check we don't hit a bug where the extra capacity is reused incorrectly
+ ps, err = ReadPatterns(s.GFS, make([]string, 0, 6))
+ c.Assert(err, IsNil)
+ checkPatterns(ps)
+}
+
+func (s *MatcherSuite) TestDir_ReadRelativeGlobalGitIgnore(c *C) {
+ for _, fs := range []billy.Filesystem{s.RFSR, s.RFSU} {
+ ps, err := LoadGlobalPatterns(fs)
+ c.Assert(err, IsNil)
+ c.Assert(ps, HasLen, 2)
+
+ m := NewMatcher(ps)
+ c.Assert(m.Match([]string{".idea/"}, true), Equals, false)
+ c.Assert(m.Match([]string{"*.iml"}, true), Equals, true)
+ c.Assert(m.Match([]string{"IntelliJ"}, true), Equals, false)
+ }
}
func (s *MatcherSuite) TestDir_LoadGlobalPatterns(c *C) {
diff --git a/plumbing/format/gitignore/pattern.go b/plumbing/format/gitignore/pattern.go
index 098cb50..450b3cd 100644
--- a/plumbing/format/gitignore/pattern.go
+++ b/plumbing/format/gitignore/pattern.go
@@ -39,6 +39,8 @@ type pattern struct {
// ParsePattern parses a gitignore pattern string into the Pattern structure.
func ParsePattern(p string, domain []string) Pattern {
+	// copy the domain before storing it, to ensure it isn't mutated externally
+ domain = append([]string(nil), domain...)
res := pattern{domain: domain}
if strings.HasPrefix(p, inclusionPrefix) {
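
The defensive copy added above prevents a caller from mutating the stored domain slice after the pattern has been parsed. A minimal illustration of the aliasing problem it avoids:

package main

import "fmt"

func main() {
	domain := []string{"vendor"}

	stored := domain                           // aliases the caller's backing array
	copied := append([]string(nil), domain...) // the defensive copy used above

	domain[0] = "changed"

	fmt.Println(stored[0]) // "changed" - the alias sees the caller's mutation
	fmt.Println(copied[0]) // "vendor"  - the copy does not
}
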
diff --git a/plumbing/format/idxfile/decoder.go b/plumbing/format/idxfile/decoder.go
index 51a3904..9afdce3 100644
--- a/plumbing/format/idxfile/decoder.go
+++ b/plumbing/format/idxfile/decoder.go
@@ -6,6 +6,7 @@ import (
"errors"
"io"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/utils/binary"
)
@@ -19,7 +20,7 @@ var (
const (
fanout = 256
- objectIDLength = 20
+ objectIDLength = hash.Size
)
// Decoder reads and decodes idx files from an input stream.
diff --git a/plumbing/format/idxfile/decoder_test.go b/plumbing/format/idxfile/decoder_test.go
index 94059cc..2c4a801 100644
--- a/plumbing/format/idxfile/decoder_test.go
+++ b/plumbing/format/idxfile/decoder_test.go
@@ -5,7 +5,6 @@ import (
"encoding/base64"
"fmt"
"io"
- "io/ioutil"
"testing"
"github.com/go-git/go-git/v5/plumbing"
@@ -119,7 +118,7 @@ ch2xUA==
func BenchmarkDecode(b *testing.B) {
f := fixtures.Basic().One()
- fixture, err := ioutil.ReadAll(f.Idx())
+ fixture, err := io.ReadAll(f.Idx())
if err != nil {
b.Errorf("unexpected error reading idx file: %s", err)
}
diff --git a/plumbing/format/idxfile/encoder.go b/plumbing/format/idxfile/encoder.go
index 26b2e4d..7514737 100644
--- a/plumbing/format/idxfile/encoder.go
+++ b/plumbing/format/idxfile/encoder.go
@@ -1,10 +1,9 @@
package idxfile
import (
- "crypto/sha1"
- "hash"
"io"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/utils/binary"
)
@@ -16,7 +15,7 @@ type Encoder struct {
// NewEncoder returns a new stream encoder that writes to w.
func NewEncoder(w io.Writer) *Encoder {
- h := sha1.New()
+ h := hash.New(hash.CryptoType)
mw := io.MultiWriter(w, h)
return &Encoder{mw, h}
}
@@ -133,10 +132,10 @@ func (e *Encoder) encodeChecksums(idx *MemoryIndex) (int, error) {
return 0, err
}
- copy(idx.IdxChecksum[:], e.hash.Sum(nil)[:20])
+ copy(idx.IdxChecksum[:], e.hash.Sum(nil)[:hash.Size])
if _, err := e.Write(idx.IdxChecksum[:]); err != nil {
return 0, err
}
- return 40, nil
+ return hash.HexSize, nil
}
diff --git a/plumbing/format/idxfile/encoder_test.go b/plumbing/format/idxfile/encoder_test.go
index 32b60f9..b8ece83 100644
--- a/plumbing/format/idxfile/encoder_test.go
+++ b/plumbing/format/idxfile/encoder_test.go
@@ -2,7 +2,7 @@ package idxfile_test
import (
"bytes"
- "io/ioutil"
+ "io"
. "github.com/go-git/go-git/v5/plumbing/format/idxfile"
@@ -12,7 +12,7 @@ import (
func (s *IdxfileSuite) TestDecodeEncode(c *C) {
fixtures.ByTag("packfile").Test(c, func(f *fixtures.Fixture) {
- expected, err := ioutil.ReadAll(f.Idx())
+ expected, err := io.ReadAll(f.Idx())
c.Assert(err, IsNil)
idx := new(MemoryIndex)
diff --git a/plumbing/format/idxfile/idxfile.go b/plumbing/format/idxfile/idxfile.go
index 64dd8dc..9237a74 100644
--- a/plumbing/format/idxfile/idxfile.go
+++ b/plumbing/format/idxfile/idxfile.go
@@ -8,6 +8,7 @@ import (
encbin "encoding/binary"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/plumbing/hash"
)
const (
@@ -53,8 +54,8 @@ type MemoryIndex struct {
Offset32 [][]byte
CRC32 [][]byte
Offset64 []byte
- PackfileChecksum [20]byte
- IdxChecksum [20]byte
+ PackfileChecksum [hash.Size]byte
+ IdxChecksum [hash.Size]byte
offsetHash map[int64]plumbing.Hash
offsetHashIsFull bool
diff --git a/plumbing/format/idxfile/writer.go b/plumbing/format/idxfile/writer.go
index daa1605..c4c21e1 100644
--- a/plumbing/format/idxfile/writer.go
+++ b/plumbing/format/idxfile/writer.go
@@ -84,11 +84,8 @@ func (w *Writer) OnFooter(h plumbing.Hash) error {
w.checksum = h
w.finished = true
_, err := w.createIndex()
- if err != nil {
- return err
- }
- return nil
+ return err
}
// creatIndex returns a filled MemoryIndex with the information filled by
@@ -139,15 +136,23 @@ func (w *Writer) createIndex() (*MemoryIndex, error) {
offset := o.Offset
if offset > math.MaxInt32 {
- offset = w.addOffset64(offset)
+ var err error
+ offset, err = w.addOffset64(offset)
+ if err != nil {
+ return nil, err
+ }
}
buf.Truncate(0)
- binary.WriteUint32(buf, uint32(offset))
+ if err := binary.WriteUint32(buf, uint32(offset)); err != nil {
+ return nil, err
+ }
idx.Offset32[bucket] = append(idx.Offset32[bucket], buf.Bytes()...)
buf.Truncate(0)
- binary.WriteUint32(buf, o.CRC32)
+ if err := binary.WriteUint32(buf, o.CRC32); err != nil {
+ return nil, err
+ }
idx.CRC32[bucket] = append(idx.CRC32[bucket], buf.Bytes()...)
}
@@ -161,15 +166,17 @@ func (w *Writer) createIndex() (*MemoryIndex, error) {
return idx, nil
}
-func (w *Writer) addOffset64(pos uint64) uint64 {
+func (w *Writer) addOffset64(pos uint64) (uint64, error) {
buf := new(bytes.Buffer)
- binary.WriteUint64(buf, pos)
- w.index.Offset64 = append(w.index.Offset64, buf.Bytes()...)
+ if err := binary.WriteUint64(buf, pos); err != nil {
+ return 0, err
+ }
+ w.index.Offset64 = append(w.index.Offset64, buf.Bytes()...)
index := uint64(w.offset64 | (1 << 31))
w.offset64++
- return index
+ return index, nil
}
func (o objects) Len() int {
diff --git a/plumbing/format/idxfile/writer_test.go b/plumbing/format/idxfile/writer_test.go
index fba3e42..eaa8605 100644
--- a/plumbing/format/idxfile/writer_test.go
+++ b/plumbing/format/idxfile/writer_test.go
@@ -3,7 +3,7 @@ package idxfile_test
import (
"bytes"
"encoding/base64"
- "io/ioutil"
+ "io"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/format/idxfile"
@@ -34,7 +34,7 @@ func (s *WriterSuite) TestWriter(c *C) {
c.Assert(err, IsNil)
idxFile := f.Idx()
- expected, err := ioutil.ReadAll(idxFile)
+ expected, err := io.ReadAll(idxFile)
c.Assert(err, IsNil)
idxFile.Close()
@@ -65,7 +65,7 @@ func (s *WriterSuite) TestWriterLarge(c *C) {
// load fixture index
f := bytes.NewBufferString(fixtureLarge4GB)
- expected, err := ioutil.ReadAll(base64.NewDecoder(base64.StdEncoding, f))
+ expected, err := io.ReadAll(base64.NewDecoder(base64.StdEncoding, f))
c.Assert(err, IsNil)
buf := new(bytes.Buffer)
diff --git a/plumbing/format/index/decoder.go b/plumbing/format/index/decoder.go
index 036b636..6778cf7 100644
--- a/plumbing/format/index/decoder.go
+++ b/plumbing/format/index/decoder.go
@@ -3,15 +3,14 @@ package index
import (
"bufio"
"bytes"
- "crypto/sha1"
"errors"
- "hash"
"io"
- "io/ioutil"
+
"strconv"
"time"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/utils/binary"
)
@@ -49,7 +48,7 @@ type Decoder struct {
// NewDecoder returns a new decoder that reads from r.
func NewDecoder(r io.Reader) *Decoder {
- h := sha1.New()
+ h := hash.New(hash.CryptoType)
return &Decoder{
r: io.TeeReader(r, h),
hash: h,
@@ -202,7 +201,7 @@ func (d *Decoder) padEntry(idx *Index, e *Entry, read int) error {
entrySize := read + len(e.Name)
padLen := 8 - entrySize%8
- _, err := io.CopyN(ioutil.Discard, d.r, int64(padLen))
+ _, err := io.CopyN(io.Discard, d.r, int64(padLen))
return err
}
diff --git a/plumbing/format/index/encoder.go b/plumbing/format/index/encoder.go
index 2c94d93..fa2d814 100644
--- a/plumbing/format/index/encoder.go
+++ b/plumbing/format/index/encoder.go
@@ -2,13 +2,12 @@ package index
import (
"bytes"
- "crypto/sha1"
"errors"
- "hash"
"io"
"sort"
"time"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/utils/binary"
)
@@ -29,7 +28,7 @@ type Encoder struct {
// NewEncoder returns a new encoder that writes to w.
func NewEncoder(w io.Writer) *Encoder {
- h := sha1.New()
+ h := hash.New(hash.CryptoType)
mw := io.MultiWriter(w, h)
return &Encoder{mw, h}
}
diff --git a/plumbing/format/objfile/reader.go b/plumbing/format/objfile/reader.go
index b6b2ca0..d7932f4 100644
--- a/plumbing/format/objfile/reader.go
+++ b/plumbing/format/objfile/reader.go
@@ -1,13 +1,13 @@
package objfile
import (
- "compress/zlib"
"errors"
"io"
"strconv"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/format/packfile"
+ "github.com/go-git/go-git/v5/utils/sync"
)
var (
@@ -20,20 +20,22 @@ var (
// Reader implements io.ReadCloser. Close should be called when finished with
// the Reader. Close will not close the underlying io.Reader.
type Reader struct {
- multi io.Reader
- zlib io.ReadCloser
- hasher plumbing.Hasher
+ multi io.Reader
+ zlib io.Reader
+ zlibref sync.ZLibReader
+ hasher plumbing.Hasher
}
// NewReader returns a new Reader reading from r.
func NewReader(r io.Reader) (*Reader, error) {
- zlib, err := zlib.NewReader(r)
+ zlib, err := sync.GetZlibReader(r)
if err != nil {
return nil, packfile.ErrZLib.AddDetails(err.Error())
}
return &Reader{
- zlib: zlib,
+ zlib: zlib.Reader,
+ zlibref: zlib,
}, nil
}
@@ -110,5 +112,6 @@ func (r *Reader) Hash() plumbing.Hash {
// Close releases any resources consumed by the Reader. Calling Close does not
// close the wrapped io.Reader originally passed to NewReader.
func (r *Reader) Close() error {
- return r.zlib.Close()
+ sync.PutZlibReader(r.zlibref)
+ return nil
}
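
sync.GetZlibReader and sync.PutZlibReader replace the package-local zlibReaderPool that is deleted from packfile/common.go later in this diff. The utils/sync package itself is not shown here, so the following is only a self-contained sketch of the pooling pattern; the real helpers additionally wrap the reader in a small ZLibReader struct.

package main

import (
	"bytes"
	"compress/zlib"
	"fmt"
	"io"
	"sync"
)

// zlibInitBytes is a canned, valid zlib stream used to create pooled readers
// that can later be Reset onto arbitrary input (the same trick as the pool
// removed from packfile/common.go below).
var zlibInitBytes = []byte{0x78, 0x9c, 0x01, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0x00, 0x01}

var zlibReaderPool = sync.Pool{
	New: func() interface{} {
		r, _ := zlib.NewReader(bytes.NewReader(zlibInitBytes))
		return r
	},
}

// getZlibReader takes a reader from the pool and resets it onto r.
func getZlibReader(r io.Reader) (io.ReadCloser, error) {
	zr := zlibReaderPool.Get().(io.ReadCloser)
	if err := zr.(zlib.Resetter).Reset(r, nil); err != nil {
		return nil, err
	}
	return zr, nil
}

// putZlibReader hands the reader back for reuse.
func putZlibReader(zr io.ReadCloser) {
	zlibReaderPool.Put(zr)
}

func main() {
	// Round-trip a small payload through the pooled reader.
	var buf bytes.Buffer
	zw := zlib.NewWriter(&buf)
	zw.Write([]byte("hello"))
	zw.Close()

	zr, err := getZlibReader(&buf)
	if err != nil {
		panic(err)
	}
	defer putZlibReader(zr)

	out, _ := io.ReadAll(zr)
	fmt.Println(string(out)) // hello
}
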
diff --git a/plumbing/format/objfile/reader_test.go b/plumbing/format/objfile/reader_test.go
index d697d54..5526f7f 100644
--- a/plumbing/format/objfile/reader_test.go
+++ b/plumbing/format/objfile/reader_test.go
@@ -5,7 +5,6 @@ import (
"encoding/base64"
"fmt"
"io"
- "io/ioutil"
"github.com/go-git/go-git/v5/plumbing"
@@ -36,7 +35,7 @@ func testReader(c *C, source io.Reader, hash plumbing.Hash, t plumbing.ObjectTyp
c.Assert(typ, Equals, t)
c.Assert(content, HasLen, int(size))
- rc, err := ioutil.ReadAll(r)
+ rc, err := io.ReadAll(r)
c.Assert(err, IsNil)
c.Assert(rc, DeepEquals, content, Commentf("%scontent=%s, expected=%s", base64.StdEncoding.EncodeToString(rc), base64.StdEncoding.EncodeToString(content)))
diff --git a/plumbing/format/objfile/writer.go b/plumbing/format/objfile/writer.go
index 2a96a43..0d0f154 100644
--- a/plumbing/format/objfile/writer.go
+++ b/plumbing/format/objfile/writer.go
@@ -7,6 +7,7 @@ import (
"strconv"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/utils/sync"
)
var (
@@ -18,9 +19,9 @@ var (
// not close the underlying io.Writer.
type Writer struct {
raw io.Writer
- zlib io.WriteCloser
hasher plumbing.Hasher
multi io.Writer
+ zlib *zlib.Writer
closed bool
pending int64 // number of unwritten bytes
@@ -31,9 +32,10 @@ type Writer struct {
// The returned Writer implements io.WriteCloser. Close should be called when
// finished with the Writer. Close will not close the underlying io.Writer.
func NewWriter(w io.Writer) *Writer {
+ zlib := sync.GetZlibWriter(w)
return &Writer{
raw: w,
- zlib: zlib.NewWriter(w),
+ zlib: zlib,
}
}
@@ -100,6 +102,7 @@ func (w *Writer) Hash() plumbing.Hash {
// Calling Close does not close the wrapped io.Writer originally passed to
// NewWriter.
func (w *Writer) Close() error {
+ defer sync.PutZlibWriter(w.zlib)
if err := w.zlib.Close(); err != nil {
return err
}
diff --git a/plumbing/format/packfile/common.go b/plumbing/format/packfile/common.go
index df423ad..36c5ef5 100644
--- a/plumbing/format/packfile/common.go
+++ b/plumbing/format/packfile/common.go
@@ -1,10 +1,7 @@
package packfile
import (
- "bytes"
- "compress/zlib"
"io"
- "sync"
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/utils/ioutil"
@@ -61,18 +58,3 @@ func WritePackfileToObjectStorage(
return err
}
-
-var bufPool = sync.Pool{
- New: func() interface{} {
- return bytes.NewBuffer(nil)
- },
-}
-
-var zlibInitBytes = []byte{0x78, 0x9c, 0x01, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0x00, 0x01}
-
-var zlibReaderPool = sync.Pool{
- New: func() interface{} {
- r, _ := zlib.NewReader(bytes.NewReader(zlibInitBytes))
- return r
- },
-}
diff --git a/plumbing/format/packfile/delta_test.go b/plumbing/format/packfile/delta_test.go
index 137e485..e8f5ea6 100644
--- a/plumbing/format/packfile/delta_test.go
+++ b/plumbing/format/packfile/delta_test.go
@@ -2,7 +2,7 @@ package packfile
import (
"bytes"
- "io/ioutil"
+ "io"
"math/rand"
"github.com/go-git/go-git/v5/plumbing"
@@ -109,14 +109,14 @@ func (s *DeltaSuite) TestAddDeltaReader(c *C) {
targetBuf := genBytes(t.target)
delta := DiffDelta(baseBuf, targetBuf)
- deltaRC := ioutil.NopCloser(bytes.NewReader(delta))
+ deltaRC := io.NopCloser(bytes.NewReader(delta))
c.Log("Executing test case:", t.description)
resultRC, err := ReaderFromDelta(baseObj, deltaRC)
c.Assert(err, IsNil)
- result, err := ioutil.ReadAll(resultRC)
+ result, err := io.ReadAll(resultRC)
c.Assert(err, IsNil)
err = resultRC.Close()
@@ -164,12 +164,12 @@ func (s *DeltaSuite) TestMaxCopySizeDeltaReader(c *C) {
targetBuf = append(targetBuf, byte(1))
delta := DiffDelta(baseBuf, targetBuf)
- deltaRC := ioutil.NopCloser(bytes.NewReader(delta))
+ deltaRC := io.NopCloser(bytes.NewReader(delta))
resultRC, err := ReaderFromDelta(baseObj, deltaRC)
c.Assert(err, IsNil)
- result, err := ioutil.ReadAll(resultRC)
+ result, err := io.ReadAll(resultRC)
c.Assert(err, IsNil)
err = resultRC.Close()
diff --git a/plumbing/format/packfile/diff_delta.go b/plumbing/format/packfile/diff_delta.go
index 1951b34..2c7a335 100644
--- a/plumbing/format/packfile/diff_delta.go
+++ b/plumbing/format/packfile/diff_delta.go
@@ -5,6 +5,7 @@ import (
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/go-git/go-git/v5/utils/sync"
)
// See https://github.com/jelmer/dulwich/blob/master/dulwich/pack.py and
@@ -43,18 +44,16 @@ func getDelta(index *deltaIndex, base, target plumbing.EncodedObject) (o plumbin
defer ioutil.CheckClose(tr, &err)
- bb := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(bb)
- bb.Reset()
+ bb := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(bb)
_, err = bb.ReadFrom(br)
if err != nil {
return nil, err
}
- tb := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(tb)
- tb.Reset()
+ tb := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(tb)
_, err = tb.ReadFrom(tr)
if err != nil {
@@ -80,9 +79,8 @@ func DiffDelta(src, tgt []byte) []byte {
}
func diffDelta(index *deltaIndex, src []byte, tgt []byte) []byte {
- buf := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(buf)
- buf.Reset()
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
buf.Write(deltaEncodeSize(len(src)))
buf.Write(deltaEncodeSize(len(tgt)))
@@ -90,9 +88,8 @@ func diffDelta(index *deltaIndex, src []byte, tgt []byte) []byte {
index.init(src)
}
- ibuf := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(ibuf)
- ibuf.Reset()
+ ibuf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(ibuf)
for i := 0; i < len(tgt); i++ {
offset, l := index.findMatch(src, tgt, i)
diff --git a/plumbing/format/packfile/encoder.go b/plumbing/format/packfile/encoder.go
index 5501f88..804f5a8 100644
--- a/plumbing/format/packfile/encoder.go
+++ b/plumbing/format/packfile/encoder.go
@@ -2,11 +2,11 @@ package packfile
import (
"compress/zlib"
- "crypto/sha1"
"fmt"
"io"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/utils/binary"
"github.com/go-git/go-git/v5/utils/ioutil"
@@ -28,7 +28,7 @@ type Encoder struct {
// OFSDeltaObject. To use Reference deltas, set useRefDeltas to true.
func NewEncoder(w io.Writer, s storer.EncodedObjectStorer, useRefDeltas bool) *Encoder {
h := plumbing.Hasher{
- Hash: sha1.New(),
+ Hash: hash.New(hash.CryptoType),
}
mw := io.MultiWriter(w, h)
ow := newOffsetWriter(mw)
@@ -131,11 +131,7 @@ func (e *Encoder) entry(o *ObjectToPack) (err error) {
defer ioutil.CheckClose(or, &err)
_, err = io.Copy(e.zw, or)
- if err != nil {
- return err
- }
-
- return nil
+ return err
}
func (e *Encoder) writeBaseIfDelta(o *ObjectToPack) error {
diff --git a/plumbing/format/packfile/encoder_test.go b/plumbing/format/packfile/encoder_test.go
index c9d49c3..6719f37 100644
--- a/plumbing/format/packfile/encoder_test.go
+++ b/plumbing/format/packfile/encoder_test.go
@@ -3,10 +3,10 @@ package packfile
import (
"bytes"
"io"
- stdioutil "io/ioutil"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/format/idxfile"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/storage/memory"
"github.com/go-git/go-billy/v5/memfs"
@@ -30,10 +30,10 @@ func (s *EncoderSuite) SetUpTest(c *C) {
}
func (s *EncoderSuite) TestCorrectPackHeader(c *C) {
- hash, err := s.enc.Encode([]plumbing.Hash{}, 10)
+ h, err := s.enc.Encode([]plumbing.Hash{}, 10)
c.Assert(err, IsNil)
- hb := [20]byte(hash)
+ hb := [hash.Size]byte(h)
// PACK + VERSION + OBJECTS + HASH
expectedResult := []byte{'P', 'A', 'C', 'K', 0, 0, 0, 2, 0, 0, 0, 0}
@@ -51,7 +51,7 @@ func (s *EncoderSuite) TestCorrectPackWithOneEmptyObject(c *C) {
_, err := s.store.SetEncodedObject(o)
c.Assert(err, IsNil)
- hash, err := s.enc.Encode([]plumbing.Hash{o.Hash()}, 10)
+ h, err := s.enc.Encode([]plumbing.Hash{o.Hash()}, 10)
c.Assert(err, IsNil)
// PACK + VERSION(2) + OBJECT NUMBER(1)
@@ -64,7 +64,7 @@ func (s *EncoderSuite) TestCorrectPackWithOneEmptyObject(c *C) {
[]byte{120, 156, 1, 0, 0, 255, 255, 0, 0, 0, 1}...)
// + HASH
- hb := [20]byte(hash)
+ hb := [hash.Size]byte(h)
expectedResult = append(expectedResult, hb[:]...)
result := s.buf.Bytes()
@@ -277,13 +277,13 @@ func objectsEqual(c *C, o1, o2 plumbing.EncodedObject) {
r1, err := o1.Reader()
c.Assert(err, IsNil)
- b1, err := stdioutil.ReadAll(r1)
+ b1, err := io.ReadAll(r1)
c.Assert(err, IsNil)
r2, err := o2.Reader()
c.Assert(err, IsNil)
- b2, err := stdioutil.ReadAll(r2)
+ b2, err := io.ReadAll(r2)
c.Assert(err, IsNil)
c.Assert(bytes.Compare(b1, b2), Equals, 0)
diff --git a/plumbing/format/packfile/packfile.go b/plumbing/format/packfile/packfile.go
index 8dd6041..6852702 100644
--- a/plumbing/format/packfile/packfile.go
+++ b/plumbing/format/packfile/packfile.go
@@ -2,7 +2,6 @@ package packfile
import (
"bytes"
- "compress/zlib"
"fmt"
"io"
"os"
@@ -13,6 +12,7 @@ import (
"github.com/go-git/go-git/v5/plumbing/format/idxfile"
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/go-git/go-git/v5/utils/sync"
)
var (
@@ -138,9 +138,8 @@ func (p *Packfile) getObjectSize(h *ObjectHeader) (int64, error) {
case plumbing.CommitObject, plumbing.TreeObject, plumbing.BlobObject, plumbing.TagObject:
return h.Length, nil
case plumbing.REFDeltaObject, plumbing.OFSDeltaObject:
- buf := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(buf)
- buf.Reset()
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
if _, _, err := p.s.NextObject(buf); err != nil {
return 0, err
@@ -227,9 +226,9 @@ func (p *Packfile) getNextObject(h *ObjectHeader, hash plumbing.Hash) (plumbing.
// For delta objects we read the delta data and apply the small object
// optimization only if the expanded version of the object still meets
// the small object threshold condition.
- buf := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(buf)
- buf.Reset()
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
+
if _, _, err := p.s.NextObject(buf); err != nil {
return nil, err
}
@@ -290,14 +289,13 @@ func (p *Packfile) getObjectContent(offset int64) (io.ReadCloser, error) {
func asyncReader(p *Packfile) (io.ReadCloser, error) {
reader := ioutil.NewReaderUsingReaderAt(p.file, p.s.r.offset)
- zr := zlibReaderPool.Get().(io.ReadCloser)
-
- if err := zr.(zlib.Resetter).Reset(reader, nil); err != nil {
+ zr, err := sync.GetZlibReader(reader)
+ if err != nil {
return nil, fmt.Errorf("zlib reset error: %s", err)
}
- return ioutil.NewReadCloserWithCloser(zr, func() error {
- zlibReaderPool.Put(zr)
+ return ioutil.NewReadCloserWithCloser(zr.Reader, func() error {
+ sync.PutZlibReader(zr)
return nil
}), nil
@@ -373,9 +371,9 @@ func (p *Packfile) fillRegularObjectContent(obj plumbing.EncodedObject) (err err
}
func (p *Packfile) fillREFDeltaObjectContent(obj plumbing.EncodedObject, ref plumbing.Hash) error {
- buf := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(buf)
- buf.Reset()
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
+
_, _, err := p.s.NextObject(buf)
if err != nil {
return err
@@ -417,9 +415,9 @@ func (p *Packfile) fillREFDeltaObjectContentWithBuffer(obj plumbing.EncodedObjec
}
func (p *Packfile) fillOFSDeltaObjectContent(obj plumbing.EncodedObject, offset int64) error {
- buf := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(buf)
- buf.Reset()
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
+
_, _, err := p.s.NextObject(buf)
if err != nil {
return err
diff --git a/plumbing/format/packfile/parser.go b/plumbing/format/packfile/parser.go
index ee5c289..edbc0e7 100644
--- a/plumbing/format/packfile/parser.go
+++ b/plumbing/format/packfile/parser.go
@@ -4,12 +4,12 @@ import (
"bytes"
"errors"
"io"
- stdioutil "io/ioutil"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/cache"
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/go-git/go-git/v5/utils/sync"
)
var (
@@ -175,7 +175,8 @@ func (p *Parser) init() error {
}
func (p *Parser) indexObjects() error {
- buf := new(bytes.Buffer)
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
for i := uint32(0); i < p.count; i++ {
buf.Reset()
@@ -219,6 +220,7 @@ func (p *Parser) indexObjects() error {
ota = newBaseObject(oh.Offset, oh.Length, t)
}
+ buf.Grow(int(oh.Length))
_, crc, err := p.scanner.NextObject(buf)
if err != nil {
return err
@@ -234,6 +236,15 @@ func (p *Parser) indexObjects() error {
return err
}
+ // Move children of placeholder parent into actual parent, in case this
+ // was a non-external delta reference.
+ if placeholder, ok := p.oiByHash[sha1]; ok {
+ ota.Children = placeholder.Children
+ for _, c := range ota.Children {
+ c.Parent = ota
+ }
+ }
+
ota.SHA1 = sha1
p.oiByHash[ota.SHA1] = ota
}
@@ -264,7 +275,9 @@ func (p *Parser) indexObjects() error {
}
func (p *Parser) resolveDeltas() error {
- buf := &bytes.Buffer{}
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
+
for _, obj := range p.oi {
buf.Reset()
err := p.get(obj, buf)
@@ -283,9 +296,10 @@ func (p *Parser) resolveDeltas() error {
if !obj.IsDelta() && len(obj.Children) > 0 {
for _, child := range obj.Children {
- if err := p.resolveObject(stdioutil.Discard, child, content); err != nil {
+ if err := p.resolveObject(io.Discard, child, content); err != nil {
return err
}
+ p.resolveExternalRef(child)
}
// Remove the delta from the cache.
@@ -298,6 +312,16 @@ func (p *Parser) resolveDeltas() error {
return nil
}
+func (p *Parser) resolveExternalRef(o *objectInfo) {
+ if ref, ok := p.oiByHash[o.SHA1]; ok && ref.ExternalRef {
+ p.oiByHash[o.SHA1] = o
+ o.Children = ref.Children
+ for _, c := range o.Children {
+ c.Parent = o
+ }
+ }
+}
+
func (p *Parser) get(o *objectInfo, buf *bytes.Buffer) (err error) {
if !o.ExternalRef { // skip cache check for placeholder parents
b, ok := p.cache.Get(o.Offset)
@@ -335,9 +359,8 @@ func (p *Parser) get(o *objectInfo, buf *bytes.Buffer) (err error) {
}
if o.DiskType.IsDelta() {
- b := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(b)
- b.Reset()
+ b := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(b)
err := p.get(o.Parent, b)
if err != nil {
return err
@@ -371,9 +394,8 @@ func (p *Parser) resolveObject(
if !o.DiskType.IsDelta() {
return nil
}
- buf := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(buf)
- buf.Reset()
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
err := p.readData(buf, o)
if err != nil {
return err
diff --git a/plumbing/format/packfile/parser_test.go b/plumbing/format/packfile/parser_test.go
index b0b4af8..b8d080f 100644
--- a/plumbing/format/packfile/parser_test.go
+++ b/plumbing/format/packfile/parser_test.go
@@ -10,8 +10,10 @@ import (
fixtures "github.com/go-git/go-git-fixtures/v4"
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/plumbing/cache"
"github.com/go-git/go-git/v5/plumbing/format/packfile"
"github.com/go-git/go-git/v5/plumbing/storer"
+ "github.com/go-git/go-git/v5/storage/filesystem"
. "gopkg.in/check.v1"
)
@@ -132,6 +134,32 @@ func (s *ParserSuite) TestThinPack(c *C) {
}
+func (s *ParserSuite) TestResolveExternalRefsInThinPack(c *C) {
+ extRefsThinPack := fixtures.ByTag("codecommit").One()
+
+ scanner := packfile.NewScanner(extRefsThinPack.Packfile())
+
+ obs := new(testObserver)
+ parser, err := packfile.NewParser(scanner, obs)
+ c.Assert(err, IsNil)
+
+ _, err = parser.Parse()
+ c.Assert(err, IsNil)
+}
+
+func (s *ParserSuite) TestResolveExternalRefs(c *C) {
+ extRefsThinPack := fixtures.ByTag("delta-before-base").One()
+
+ scanner := packfile.NewScanner(extRefsThinPack.Packfile())
+
+ obs := new(testObserver)
+ parser, err := packfile.NewParser(scanner, obs)
+ c.Assert(err, IsNil)
+
+ _, err = parser.Parse()
+ c.Assert(err, IsNil)
+}
+
type observerObject struct {
hash string
otype plumbing.ObjectType
@@ -235,3 +263,29 @@ func BenchmarkParseBasic(b *testing.B) {
}
}
}
+
+func BenchmarkParser(b *testing.B) {
+ f := fixtures.Basic().One()
+ defer fixtures.Clean()
+
+ b.ResetTimer()
+ for n := 0; n < b.N; n++ {
+ b.StopTimer()
+ scanner := packfile.NewScanner(f.Packfile())
+ fs := osfs.New(os.TempDir())
+ storage := filesystem.NewStorage(fs, cache.NewObjectLRUDefault())
+
+ parser, err := packfile.NewParserWithStorage(scanner, storage)
+ if err != nil {
+ b.Error(err)
+ }
+
+ b.StartTimer()
+ _, err = parser.Parse()
+
+ b.StopTimer()
+ if err != nil {
+ b.Error(err)
+ }
+ }
+}
diff --git a/plumbing/format/packfile/patch_delta.go b/plumbing/format/packfile/patch_delta.go
index 17da11e..f00562d 100644
--- a/plumbing/format/packfile/patch_delta.go
+++ b/plumbing/format/packfile/patch_delta.go
@@ -9,6 +9,7 @@ import (
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/go-git/go-git/v5/utils/sync"
)
// See https://github.com/git/git/blob/49fa3dc76179e04b0833542fa52d0f287a4955ac/delta.h
@@ -34,18 +35,16 @@ func ApplyDelta(target, base plumbing.EncodedObject, delta []byte) (err error) {
defer ioutil.CheckClose(w, &err)
- buf := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(buf)
- buf.Reset()
+ buf := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(buf)
_, err = buf.ReadFrom(r)
if err != nil {
return err
}
src := buf.Bytes()
- dst := bufPool.Get().(*bytes.Buffer)
- defer bufPool.Put(dst)
- dst.Reset()
+ dst := sync.GetBytesBuffer()
+ defer sync.PutBytesBuffer(dst)
err = patchDelta(dst, src, delta)
if err != nil {
return err
@@ -53,9 +52,9 @@ func ApplyDelta(target, base plumbing.EncodedObject, delta []byte) (err error) {
target.SetSize(int64(dst.Len()))
- b := byteSlicePool.Get().([]byte)
- _, err = io.CopyBuffer(w, dst, b)
- byteSlicePool.Put(b)
+ b := sync.GetByteSlice()
+ _, err = io.CopyBuffer(w, dst, *b)
+ sync.PutByteSlice(b)
return err
}
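
Note the *b dereference above: GetByteSlice hands out a *[]byte rather than a []byte, so returning the buffer to a sync.Pool does not allocate when the slice header is boxed into an interface value. The utils/sync helpers are not shown in this diff; the snippet below is a minimal stand-in for the pattern, reusing the 32 KiB size from the pool removed from scanner.go further down.

package main

import (
	"fmt"
	"sync"
)

// byteSlicePool stores *[]byte instead of []byte so Get/Put do not allocate
// when the slice header is converted to an interface{}.
var byteSlicePool = sync.Pool{
	New: func() interface{} {
		b := make([]byte, 32*1024)
		return &b
	},
}

func getByteSlice() *[]byte  { return byteSlicePool.Get().(*[]byte) }
func putByteSlice(b *[]byte) { byteSlicePool.Put(b) }

func main() {
	buf := getByteSlice()
	defer putByteSlice(buf)

	// Callers dereference the pointer to reach the backing buffer, which is
	// what the io.CopyBuffer(w, dst, *b) call above does.
	fmt.Println("buffer size:", len(*buf))
}
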
diff --git a/plumbing/format/packfile/scanner.go b/plumbing/format/packfile/scanner.go
index 5d9e8fb..730343e 100644
--- a/plumbing/format/packfile/scanner.go
+++ b/plumbing/format/packfile/scanner.go
@@ -3,17 +3,15 @@ package packfile
import (
"bufio"
"bytes"
- "compress/zlib"
"fmt"
"hash"
"hash/crc32"
"io"
- stdioutil "io/ioutil"
- "sync"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/utils/binary"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/go-git/go-git/v5/utils/sync"
)
var (
@@ -114,7 +112,7 @@ func (s *Scanner) Header() (version, objects uint32, err error) {
return
}
-// readSignature reads an returns the signature field in the packfile.
+// readSignature reads and returns the signature field in the packfile.
func (s *Scanner) readSignature() ([]byte, error) {
var sig = make([]byte, 4)
if _, err := io.ReadFull(s.r, sig); err != nil {
@@ -243,7 +241,7 @@ func (s *Scanner) discardObjectIfNeeded() error {
}
h := s.pendingObject
- n, _, err := s.NextObject(stdioutil.Discard)
+ n, _, err := s.NextObject(io.Discard)
if err != nil {
return err
}
@@ -323,14 +321,14 @@ func (s *Scanner) NextObject(w io.Writer) (written int64, crc32 uint32, err erro
// ReadObject returns a reader for the object content and an error
func (s *Scanner) ReadObject() (io.ReadCloser, error) {
s.pendingObject = nil
- zr := zlibReaderPool.Get().(io.ReadCloser)
+ zr, err := sync.GetZlibReader(s.r)
- if err := zr.(zlib.Resetter).Reset(s.r, nil); err != nil {
+ if err != nil {
return nil, fmt.Errorf("zlib reset error: %s", err)
}
- return ioutil.NewReadCloserWithCloser(zr, func() error {
- zlibReaderPool.Put(zr)
+ return ioutil.NewReadCloserWithCloser(zr.Reader, func() error {
+ sync.PutZlibReader(zr)
return nil
}), nil
}
@@ -338,26 +336,20 @@ func (s *Scanner) ReadObject() (io.ReadCloser, error) {
// ReadRegularObject reads and write a non-deltified object
// from it zlib stream in an object entry in the packfile.
func (s *Scanner) copyObject(w io.Writer) (n int64, err error) {
- zr := zlibReaderPool.Get().(io.ReadCloser)
- defer zlibReaderPool.Put(zr)
+ zr, err := sync.GetZlibReader(s.r)
+ defer sync.PutZlibReader(zr)
- if err = zr.(zlib.Resetter).Reset(s.r, nil); err != nil {
+ if err != nil {
return 0, fmt.Errorf("zlib reset error: %s", err)
}
- defer ioutil.CheckClose(zr, &err)
- buf := byteSlicePool.Get().([]byte)
- n, err = io.CopyBuffer(w, zr, buf)
- byteSlicePool.Put(buf)
+ defer ioutil.CheckClose(zr.Reader, &err)
+ buf := sync.GetByteSlice()
+ n, err = io.CopyBuffer(w, zr.Reader, *buf)
+ sync.PutByteSlice(buf)
return
}
-var byteSlicePool = sync.Pool{
- New: func() interface{} {
- return make([]byte, 32*1024)
- },
-}
-
// SeekFromStart sets a new offset from start, returns the old position before
// the change.
func (s *Scanner) SeekFromStart(offset int64) (previous int64, err error) {
@@ -387,9 +379,10 @@ func (s *Scanner) Checksum() (plumbing.Hash, error) {
// Close reads the reader until io.EOF
func (s *Scanner) Close() error {
- buf := byteSlicePool.Get().([]byte)
- _, err := io.CopyBuffer(stdioutil.Discard, s.r, buf)
- byteSlicePool.Put(buf)
+ buf := sync.GetByteSlice()
+ _, err := io.CopyBuffer(io.Discard, s.r, *buf)
+ sync.PutByteSlice(buf)
+
return err
}
@@ -399,13 +392,13 @@ func (s *Scanner) Flush() error {
}
// scannerReader has the following characteristics:
-// - Provides an io.SeekReader impl for bufio.Reader, when the underlying
-// reader supports it.
-// - Keeps track of the current read position, for when the underlying reader
-// isn't an io.SeekReader, but we still want to know the current offset.
-// - Writes to the hash writer what it reads, with the aid of a smaller buffer.
-// The buffer helps avoid a performance penality for performing small writes
-// to the crc32 hash writer.
+// - Provides an io.SeekReader impl for bufio.Reader, when the underlying
+// reader supports it.
+// - Keeps track of the current read position, for when the underlying reader
+// isn't an io.SeekReader, but we still want to know the current offset.
+// - Writes to the hash writer what it reads, with the aid of a smaller buffer.
+// The buffer helps avoid a performance penalty for performing small writes
+// to the crc32 hash writer.
type scannerReader struct {
reader io.Reader
crc io.Writer
diff --git a/plumbing/format/packfile/scanner_test.go b/plumbing/format/packfile/scanner_test.go
index 892a27c..9dcc359 100644
--- a/plumbing/format/packfile/scanner_test.go
+++ b/plumbing/format/packfile/scanner_test.go
@@ -6,6 +6,7 @@ import (
fixtures "github.com/go-git/go-git-fixtures/v4"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/plumbing/hash"
. "gopkg.in/check.v1"
)
@@ -71,7 +72,7 @@ func (s *ScannerSuite) testNextObjectHeader(c *C, tag string,
n, err := p.Checksum()
c.Assert(err, IsNil)
- c.Assert(n, HasLen, 20)
+ c.Assert(n, HasLen, hash.Size)
}
func (s *ScannerSuite) TestNextObjectHeaderWithOutReadObject(c *C) {
diff --git a/plumbing/hash.go b/plumbing/hash.go
index afc602a..39bb73f 100644
--- a/plumbing/hash.go
+++ b/plumbing/hash.go
@@ -2,15 +2,15 @@ package plumbing
import (
"bytes"
- "crypto/sha1"
"encoding/hex"
- "hash"
"sort"
"strconv"
+
+ "github.com/go-git/go-git/v5/plumbing/hash"
)
// Hash SHA1 hashed content
-type Hash [20]byte
+type Hash [hash.Size]byte
// ZeroHash is Hash with value zero
var ZeroHash Hash
@@ -46,7 +46,7 @@ type Hasher struct {
}
func NewHasher(t ObjectType, size int64) Hasher {
- h := Hasher{sha1.New()}
+ h := Hasher{hash.New(hash.CryptoType)}
h.Write(t.Bytes())
h.Write([]byte(" "))
h.Write([]byte(strconv.FormatInt(size, 10)))
@@ -74,10 +74,11 @@ func (p HashSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
// IsHash returns true if the given string is a valid hash.
func IsHash(s string) bool {
- if len(s) != 40 {
+ switch len(s) {
+ case hash.HexSize:
+ _, err := hex.DecodeString(s)
+ return err == nil
+ default:
return false
}
-
- _, err := hex.DecodeString(s)
- return err == nil
}
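
Because IsHash now compares against hash.HexSize instead of a literal 40, the same code accepts 64-character hashes when built with the sha256 tag. Under a default build:

package main

import (
	"fmt"

	"github.com/go-git/go-git/v5/plumbing"
)

func main() {
	// 40 hexadecimal characters: valid under the default SHA1 build.
	fmt.Println(plumbing.IsHash("8ab686eafeb1f44702738c8b0f24f2567c36da6d")) // true
	// Wrong length and non-hex characters: rejected.
	fmt.Println(plumbing.IsHash("not-a-hash")) // false
}
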
diff --git a/plumbing/hash/hash.go b/plumbing/hash/hash.go
new file mode 100644
index 0000000..82d1856
--- /dev/null
+++ b/plumbing/hash/hash.go
@@ -0,0 +1,60 @@
+// Package hash provides a way to manage the
+// underlying hash implementations used across go-git.
+package hash
+
+import (
+ "crypto"
+ "fmt"
+ "hash"
+
+ "github.com/pjbgf/sha1cd"
+)
+
+// algos is a map of hash algorithms.
+var algos = map[crypto.Hash]func() hash.Hash{}
+
+func init() {
+ reset()
+}
+
+// reset resets the default algos value. Can be used after running tests
+// that register new algorithms, to avoid side effects.
+func reset() {
+ algos[crypto.SHA1] = sha1cd.New
+ algos[crypto.SHA256] = crypto.SHA256.New
+}
+
+// RegisterHash allows the hash algorithm used to be overridden.
+// This ensures that the hash selection for go-git is always explicit
+// when overriding the default value.
+func RegisterHash(h crypto.Hash, f func() hash.Hash) error {
+ if f == nil {
+ return fmt.Errorf("cannot register hash: f is nil")
+ }
+
+ switch h {
+ case crypto.SHA1:
+ algos[h] = f
+ case crypto.SHA256:
+ algos[h] = f
+ default:
+ return fmt.Errorf("unsupported hash function: %v", h)
+ }
+ return nil
+}
+
+// Hash is the same as hash.Hash. This allows consumers
+// to avoid having to import this package alongside "hash".
+type Hash interface {
+ hash.Hash
+}
+
+// New returns a new Hash for the given hash function.
+// It panics if the hash function is not registered.
+func New(h crypto.Hash) Hash {
+ hh, ok := algos[h]
+ if !ok {
+ panic(fmt.Sprintf("hash algorithm not registered: %v", h))
+ }
+ return hh()
+}
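
The test file further down exercises RegisterHash; as a quick usage sketch, swapping the default collision-detecting SHA1 (sha1cd) back to the standard library implementation has to happen before any objects are hashed.

package main

import (
	"crypto"
	"crypto/sha1"
	"fmt"

	"github.com/go-git/go-git/v5/plumbing/hash"
)

func main() {
	// Opt out of sha1cd and use the standard library SHA1 instead,
	// e.g. for benchmarking. Must run before any hashing takes place.
	if err := hash.RegisterHash(crypto.SHA1, sha1.New); err != nil {
		panic(err)
	}

	h := hash.New(crypto.SHA1)
	h.Write([]byte("hello"))
	fmt.Printf("%x\n", h.Sum(nil))
}
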
diff --git a/plumbing/hash/hash_sha1.go b/plumbing/hash/hash_sha1.go
new file mode 100644
index 0000000..e3cb60f
--- /dev/null
+++ b/plumbing/hash/hash_sha1.go
@@ -0,0 +1,15 @@
+//go:build !sha256
+// +build !sha256
+
+package hash
+
+import "crypto"
+
+const (
+ // CryptoType defines what hash algorithm is being used.
+ CryptoType = crypto.SHA1
+	// Size defines the number of bytes the hash yields.
+	Size = 20
+	// HexSize defines the length of the hash when represented as a hexadecimal string.
+ HexSize = 40
+)
diff --git a/plumbing/hash/hash_sha256.go b/plumbing/hash/hash_sha256.go
new file mode 100644
index 0000000..1c52b89
--- /dev/null
+++ b/plumbing/hash/hash_sha256.go
@@ -0,0 +1,15 @@
+//go:build sha256
+// +build sha256
+
+package hash
+
+import "crypto"
+
+const (
+ // CryptoType defines what hash algorithm is being used.
+ CryptoType = crypto.SHA256
+	// Size defines the number of bytes the hash yields.
+	Size = 32
+	// HexSize defines the length of the hash when represented as a hexadecimal string.
+ HexSize = 64
+)
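
Which of the two constant files is compiled in is controlled by the sha256 build tag, for example go build -tags sha256. A tiny probe that reports the active configuration:

package main

import (
	"fmt"

	"github.com/go-git/go-git/v5/plumbing/hash"
)

func main() {
	// Default build: "SHA-1 20 40". With -tags sha256: "SHA-256 32 64".
	fmt.Println(hash.CryptoType, hash.Size, hash.HexSize)
}
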
diff --git a/plumbing/hash/hash_test.go b/plumbing/hash/hash_test.go
new file mode 100644
index 0000000..f70ad11
--- /dev/null
+++ b/plumbing/hash/hash_test.go
@@ -0,0 +1,103 @@
+package hash
+
+import (
+ "crypto"
+ "crypto/sha1"
+ "crypto/sha512"
+ "encoding/hex"
+ "hash"
+ "strings"
+ "testing"
+)
+
+func TestRegisterHash(t *testing.T) {
+ // Reset default hash to avoid side effects.
+ defer reset()
+
+ tests := []struct {
+ name string
+ hash crypto.Hash
+ new func() hash.Hash
+ wantErr string
+ }{
+ {
+ name: "sha1",
+ hash: crypto.SHA1,
+ new: sha1.New,
+ },
+ {
+ name: "sha1",
+ hash: crypto.SHA1,
+ wantErr: "cannot register hash: f is nil",
+ },
+ {
+ name: "sha512",
+ hash: crypto.SHA512,
+ new: sha512.New,
+ wantErr: "unsupported hash function",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ err := RegisterHash(tt.hash, tt.new)
+ if tt.wantErr == "" && err != nil {
+ t.Errorf("unexpected error: %v", err)
+ } else if tt.wantErr != "" && err == nil {
+ t.Errorf("expected error: %v got: nil", tt.wantErr)
+ } else if err != nil && !strings.Contains(err.Error(), tt.wantErr) {
+ t.Errorf("expected error: %v got: %v", tt.wantErr, err)
+ }
+ })
+ }
+}
+
+// Verifies that the SHA1 implementation used is collision-resistant
+// by default.
+func TestSha1Collision(t *testing.T) {
+ defer reset()
+
+ tests := []struct {
+ name string
+ content string
+ hash string
+ before func()
+ }{
+ {
+ name: "sha-mbles-1: with collision detection",
+ content: "99040d047fe81780012000ff4b65792069732070617274206f66206120636f6c6c6973696f6e212049742773206120747261702179c61af0afcc054515d9274e7307624b1dc7fb23988bb8de8b575dba7b9eab31c1674b6d974378a827732ff5851c76a2e60772b5a47ce1eac40bb993c12d8c70e24a4f8d5fcdedc1b32c9cf19e31af2429759d42e4dfdb31719f587623ee552939b6dcdc459fca53553b70f87ede30a247ea3af6c759a2f20b320d760db64ff479084fd3ccb3cdd48362d96a9c430617caff6c36c637e53fde28417f626fec54ed7943a46e5f5730f2bb38fb1df6e0090010d00e24ad78bf92641993608e8d158a789f34c46fe1e6027f35a4cbfb827076c50eca0e8b7cca69bb2c2b790259f9bf9570dd8d4437a3115faff7c3cac09ad25266055c27104755178eaeff825a2caa2acfb5de64ce7641dc59a541a9fc9c756756e2e23dc713c8c24c9790aa6b0e38a7f55f14452a1ca2850ddd9562fd9a18ad42496aa97008f74672f68ef461eb88b09933d626b4f918749cc027fddd6c425fc4216835d0134d15285bab2cb784a4f7cbb4fb514d4bf0f6237cf00a9e9f132b9a066e6fd17f6c42987478586ff651af96747fb426b9872b9a88e4063f59bb334cc00650f83a80c42751b71974d300fc2819a2e8f1e32c1b51cb18e6bfc4db9baef675d4aaf5b1574a047f8f6dd2ec153a93412293974d928f88ced9363cfef97ce2e742bf34c96b8ef3875676fea5cca8e5f7dea0bab2413d4de00ee71ee01f162bdb6d1eafd925e6aebaae6a354ef17cf205a404fbdb12fc454d41fdd95cf2459664a2ad032d1da60a73264075d7f1e0d6c1403ae7a0d861df3fe5707188dd5e07d1589b9f8b6630553f8fc352b3e0c27da80bddba4c64020d",
+ hash: "4f3d9be4a472c4dae83c6314aa6c36a064c1fd14",
+ },
+ {
+ name: "sha-mbles-1: with default SHA1",
+ content: "99040d047fe81780012000ff4b65792069732070617274206f66206120636f6c6c6973696f6e212049742773206120747261702179c61af0afcc054515d9274e7307624b1dc7fb23988bb8de8b575dba7b9eab31c1674b6d974378a827732ff5851c76a2e60772b5a47ce1eac40bb993c12d8c70e24a4f8d5fcdedc1b32c9cf19e31af2429759d42e4dfdb31719f587623ee552939b6dcdc459fca53553b70f87ede30a247ea3af6c759a2f20b320d760db64ff479084fd3ccb3cdd48362d96a9c430617caff6c36c637e53fde28417f626fec54ed7943a46e5f5730f2bb38fb1df6e0090010d00e24ad78bf92641993608e8d158a789f34c46fe1e6027f35a4cbfb827076c50eca0e8b7cca69bb2c2b790259f9bf9570dd8d4437a3115faff7c3cac09ad25266055c27104755178eaeff825a2caa2acfb5de64ce7641dc59a541a9fc9c756756e2e23dc713c8c24c9790aa6b0e38a7f55f14452a1ca2850ddd9562fd9a18ad42496aa97008f74672f68ef461eb88b09933d626b4f918749cc027fddd6c425fc4216835d0134d15285bab2cb784a4f7cbb4fb514d4bf0f6237cf00a9e9f132b9a066e6fd17f6c42987478586ff651af96747fb426b9872b9a88e4063f59bb334cc00650f83a80c42751b71974d300fc2819a2e8f1e32c1b51cb18e6bfc4db9baef675d4aaf5b1574a047f8f6dd2ec153a93412293974d928f88ced9363cfef97ce2e742bf34c96b8ef3875676fea5cca8e5f7dea0bab2413d4de00ee71ee01f162bdb6d1eafd925e6aebaae6a354ef17cf205a404fbdb12fc454d41fdd95cf2459664a2ad032d1da60a73264075d7f1e0d6c1403ae7a0d861df3fe5707188dd5e07d1589b9f8b6630553f8fc352b3e0c27da80bddba4c64020d",
+ hash: "8ac60ba76f1999a1ab70223f225aefdc78d4ddc0",
+ before: func() {
+ RegisterHash(crypto.SHA1, sha1.New)
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if tt.before != nil {
+ tt.before()
+ }
+
+ h := New(crypto.SHA1)
+ data, err := hex.DecodeString(tt.content)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ h.Reset()
+ h.Write(data)
+ sum := h.Sum(nil)
+ got := hex.EncodeToString(sum)
+
+ if tt.hash != got {
+ t.Errorf("\n got: %q\nwanted: %q", got, tt.hash)
+ }
+ })
+ }
+}
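A minimal sketch of how the new plumbing/hash factory above might be consumed from application code; the import alias and the sample input are illustrative, and crypto.SHA1 resolves to the collision-detecting implementation unless another one is registered via RegisterHash:

package main

import (
	"crypto"
	"encoding/hex"
	"fmt"

	githash "github.com/go-git/go-git/v5/plumbing/hash"
)

func main() {
	// New returns a hash for the requested algorithm; for crypto.SHA1 this is
	// the collision-detecting implementation registered by default.
	h := githash.New(crypto.SHA1)
	h.Write([]byte("some object content"))
	fmt.Println(hex.EncodeToString(h.Sum(nil)))
}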
diff --git a/plumbing/memory.go b/plumbing/memory.go
index 21337cc..6d11271 100644
--- a/plumbing/memory.go
+++ b/plumbing/memory.go
@@ -25,13 +25,13 @@ func (o *MemoryObject) Hash() Hash {
return o.h
}
-// Type return the ObjectType
+// Type returns the ObjectType
func (o *MemoryObject) Type() ObjectType { return o.t }
// SetType sets the ObjectType
func (o *MemoryObject) SetType(t ObjectType) { o.t = t }
-// Size return the size of the object
+// Size returns the size of the object
func (o *MemoryObject) Size() int64 { return o.sz }
// SetSize set the object size, a content of the given size should be written
diff --git a/plumbing/memory_test.go b/plumbing/memory_test.go
index 2a141f4..f76b4f4 100644
--- a/plumbing/memory_test.go
+++ b/plumbing/memory_test.go
@@ -2,7 +2,6 @@ package plumbing
import (
"io"
- "io/ioutil"
. "gopkg.in/check.v1"
)
@@ -52,7 +51,7 @@ func (s *MemoryObjectSuite) TestReader(c *C) {
c.Assert(err, IsNil)
defer func() { c.Assert(reader.Close(), IsNil) }()
- b, err := ioutil.ReadAll(reader)
+ b, err := io.ReadAll(reader)
c.Assert(err, IsNil)
c.Assert(b, DeepEquals, []byte("foo"))
}
@@ -75,7 +74,7 @@ func (s *MemoryObjectSuite) TestSeekableReader(c *C) {
_, err = rs.Seek(pageSize, io.SeekStart)
c.Assert(err, IsNil)
- b, err := ioutil.ReadAll(rs)
+ b, err := io.ReadAll(rs)
c.Assert(err, IsNil)
c.Assert(b, DeepEquals, []byte(payload))
diff --git a/plumbing/object/blob_test.go b/plumbing/object/blob_test.go
index 4461343..9481dbe 100644
--- a/plumbing/object/blob_test.go
+++ b/plumbing/object/blob_test.go
@@ -3,7 +3,6 @@ package object
import (
"bytes"
"io"
- "io/ioutil"
"github.com/go-git/go-git/v5/plumbing"
@@ -37,7 +36,7 @@ func (s *BlobsSuite) TestBlobHash(c *C) {
c.Assert(err, IsNil)
defer func() { c.Assert(reader.Close(), IsNil) }()
- data, err := ioutil.ReadAll(reader)
+ data, err := io.ReadAll(reader)
c.Assert(err, IsNil)
c.Assert(string(data), Equals, "FOO")
}
@@ -96,14 +95,14 @@ func (s *BlobsSuite) TestBlobIter(c *C) {
r1, err := b.Reader()
c.Assert(err, IsNil)
- b1, err := ioutil.ReadAll(r1)
+ b1, err := io.ReadAll(r1)
c.Assert(err, IsNil)
c.Assert(r1.Close(), IsNil)
r2, err := blobs[i].Reader()
c.Assert(err, IsNil)
- b2, err := ioutil.ReadAll(r2)
+ b2, err := io.ReadAll(r2)
c.Assert(err, IsNil)
c.Assert(r2.Close(), IsNil)
diff --git a/plumbing/object/change.go b/plumbing/object/change.go
index 8b119bc..3c619df 100644
--- a/plumbing/object/change.go
+++ b/plumbing/object/change.go
@@ -39,7 +39,7 @@ func (c *Change) Action() (merkletrie.Action, error) {
return merkletrie.Modify, nil
}
-// Files return the files before and after a change.
+// Files returns the files before and after a change.
// For insertions from will be nil. For deletions to will be nil.
func (c *Change) Files() (from, to *File, err error) {
action, err := c.Action()
diff --git a/plumbing/object/commit.go b/plumbing/object/commit.go
index 7a1b8e5..8a0f35c 100644
--- a/plumbing/object/commit.go
+++ b/plumbing/object/commit.go
@@ -1,7 +1,6 @@
package object
import (
- "bufio"
"bytes"
"context"
"errors"
@@ -14,6 +13,7 @@ import (
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/go-git/go-git/v5/utils/sync"
)
const (
@@ -180,9 +180,8 @@ func (c *Commit) Decode(o plumbing.EncodedObject) (err error) {
}
defer ioutil.CheckClose(reader, &err)
- r := bufPool.Get().(*bufio.Reader)
- defer bufPool.Put(r)
- r.Reset(reader)
+ r := sync.GetBufioReader(reader)
+ defer sync.PutBufioReader(r)
var message bool
var pgpsig bool
@@ -377,6 +376,17 @@ func (c *Commit) Verify(armoredKeyRing string) (*openpgp.Entity, error) {
return openpgp.CheckArmoredDetachedSignature(keyring, er, signature, nil)
}
+// Less defines a compare function to determine which commit is 'earlier' by:
+// - First use Committer.When
+// - If the Committer.When values are equal, then use Author.When
+// - If the Author.When values are also equal, then compare the hash bytes
+func (c *Commit) Less(rhs *Commit) bool {
+ return c.Committer.When.Before(rhs.Committer.When) ||
+ (c.Committer.When.Equal(rhs.Committer.When) &&
+ (c.Author.When.Before(rhs.Author.When) ||
+ (c.Author.When.Equal(rhs.Author.When) && bytes.Compare(c.Hash[:], rhs.Hash[:]) < 0)))
+}
+
func indent(t string) string {
var output []string
for _, line := range strings.Split(t, "\n") {
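A small usage sketch for the new Commit.Less above; the sortCommits helper and its package name are illustrative only:

package example

import (
	"sort"

	"github.com/go-git/go-git/v5/plumbing/object"
)

// sortCommits orders commits deterministically: committer time first,
// then author time, then hash bytes, mirroring Commit.Less.
func sortCommits(commits []*object.Commit) {
	sort.Slice(commits, func(i, j int) bool {
		return commits[i].Less(commits[j])
	})
}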
diff --git a/plumbing/object/commit_test.go b/plumbing/object/commit_test.go
index 468a751..4b0f6b4 100644
--- a/plumbing/object/commit_test.go
+++ b/plumbing/object/commit_test.go
@@ -4,7 +4,6 @@ import (
"bytes"
"context"
"io"
- "io/ioutil"
"strings"
"time"
@@ -449,7 +448,7 @@ YIefGtzXfldDxg4=
`
e, err := commit.Verify(armoredKeyRing)
- c.Assert(err, IsNil)
+ c.Assert(err, IsNil)
_, ok := e.Identities["go-git test key"]
c.Assert(ok, Equals, true)
@@ -492,7 +491,7 @@ func (s *SuiteCommit) TestEncodeWithoutSignature(c *C) {
c.Assert(err, IsNil)
er, err := encoded.Reader()
c.Assert(err, IsNil)
- payload, err := ioutil.ReadAll(er)
+ payload, err := io.ReadAll(er)
c.Assert(err, IsNil)
c.Assert(string(payload), Equals, ""+
@@ -504,3 +503,73 @@ func (s *SuiteCommit) TestEncodeWithoutSignature(c *C) {
"\n"+
"Merge branch 'master' of github.com:tyba/git-fixture\n")
}
+
+func (s *SuiteCommit) TestLess(c *C) {
+ when1 := time.Now()
+ when2 := when1.Add(time.Hour)
+
+ hash1 := plumbing.NewHash("1669dce138d9b841a518c64b10914d88f5e488ea")
+ hash2 := plumbing.NewHash("2669dce138d9b841a518c64b10914d88f5e488ea")
+
+ commitLessTests := []struct {
+ Committer1When, Committer2When time.Time
+ Author1When, Author2When time.Time
+ Hash1, Hash2 plumbing.Hash
+ Exp bool
+ }{
+ {when1, when1, when1, when1, hash1, hash2, true},
+ {when1, when1, when1, when1, hash2, hash1, false},
+ {when1, when1, when1, when2, hash1, hash2, true},
+ {when1, when1, when1, when2, hash2, hash1, true},
+ {when1, when1, when2, when1, hash1, hash2, false},
+ {when1, when1, when2, when1, hash2, hash1, false},
+ {when1, when1, when2, when2, hash1, hash2, true},
+ {when1, when1, when2, when2, hash2, hash1, false},
+ {when1, when2, when1, when1, hash1, hash2, true},
+ {when1, when2, when1, when1, hash2, hash1, true},
+ {when1, when2, when1, when2, hash1, hash2, true},
+ {when1, when2, when1, when2, hash2, hash1, true},
+ {when1, when2, when2, when1, hash1, hash2, true},
+ {when1, when2, when2, when1, hash2, hash1, true},
+ {when1, when2, when2, when2, hash1, hash2, true},
+ {when1, when2, when2, when2, hash2, hash1, true},
+ {when2, when1, when1, when1, hash1, hash2, false},
+ {when2, when1, when1, when1, hash2, hash1, false},
+ {when2, when1, when1, when2, hash1, hash2, false},
+ {when2, when1, when1, when2, hash2, hash1, false},
+ {when2, when1, when2, when1, hash1, hash2, false},
+ {when2, when1, when2, when1, hash2, hash1, false},
+ {when2, when1, when2, when2, hash1, hash2, false},
+ {when2, when1, when2, when2, hash2, hash1, false},
+ {when2, when2, when1, when1, hash1, hash2, true},
+ {when2, when2, when1, when1, hash2, hash1, false},
+ {when2, when2, when1, when2, hash1, hash2, true},
+ {when2, when2, when1, when2, hash2, hash1, true},
+ {when2, when2, when2, when1, hash1, hash2, false},
+ {when2, when2, when2, when1, hash2, hash1, false},
+ {when2, when2, when2, when2, hash1, hash2, true},
+ {when2, when2, when2, when2, hash2, hash1, false},
+ }
+
+ for _, t := range commitLessTests {
+ commit1 := &Commit{
+ Hash: t.Hash1,
+ Author: Signature{
+ When: t.Author1When,
+ },
+ Committer: Signature{
+ When: t.Committer1When,
+ },
+ }
+ commit2 := &Commit{
+ Hash: t.Hash2,
+ Author: Signature{
+ When: t.Author2When,
+ },
+ Committer: Signature{
+ When: t.Committer2When,
+ },
+ }
+ c.Assert(commit1.Less(commit2), Equals, t.Exp)
+ }
+}
diff --git a/plumbing/object/common.go b/plumbing/object/common.go
deleted file mode 100644
index 3591f5f..0000000
--- a/plumbing/object/common.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package object
-
-import (
- "bufio"
- "sync"
-)
-
-var bufPool = sync.Pool{
- New: func() interface{} {
- return bufio.NewReader(nil)
- },
-}
diff --git a/plumbing/object/object_test.go b/plumbing/object/object_test.go
index 6c95eef..c4fdb4c 100644
--- a/plumbing/object/object_test.go
+++ b/plumbing/object/object_test.go
@@ -2,7 +2,6 @@ package object
import (
"io"
- "io/ioutil"
"testing"
"time"
@@ -103,7 +102,7 @@ func (s *ObjectsSuite) TestParseTree(c *C) {
reader, err := f.Reader()
c.Assert(err, IsNil)
defer func() { c.Assert(reader.Close(), IsNil) }()
- content, _ := ioutil.ReadAll(reader)
+ content, _ := io.ReadAll(reader)
c.Assert(content, HasLen, 2780)
}
}
diff --git a/plumbing/object/rename.go b/plumbing/object/rename.go
index 7fed72c..ad2b902 100644
--- a/plumbing/object/rename.go
+++ b/plumbing/object/rename.go
@@ -403,10 +403,16 @@ func min(a, b int) int {
return b
}
+const maxMatrixSize = 10000
+
func buildSimilarityMatrix(srcs, dsts []*Change, renameScore int) (similarityMatrix, error) {
// Allocate for the worst-case scenario where every pair has a score
// that we need to consider. We might not need that many.
- matrix := make(similarityMatrix, 0, len(srcs)*len(dsts))
+ matrixSize := len(srcs) * len(dsts)
+ if matrixSize > maxMatrixSize {
+ matrixSize = maxMatrixSize
+ }
+ matrix := make(similarityMatrix, 0, matrixSize)
srcSizes := make([]int64, len(srcs))
dstSizes := make([]int64, len(dsts))
dstTooLarge := make(map[int]bool)
@@ -735,10 +741,7 @@ func (i *similarityIndex) add(key int, cnt uint64) error {
// It's the same key, so increment the counter.
var err error
i.hashes[j], err = newKeyCountPair(key, v.count()+cnt)
- if err != nil {
- return err
- }
- return nil
+ return err
} else if j+1 >= len(i.hashes) {
j = 0
} else {
diff --git a/plumbing/object/signature.go b/plumbing/object/signature.go
new file mode 100644
index 0000000..91cf371
--- /dev/null
+++ b/plumbing/object/signature.go
@@ -0,0 +1,101 @@
+package object
+
+import "bytes"
+
+const (
+ signatureTypeUnknown signatureType = iota
+ signatureTypeOpenPGP
+ signatureTypeX509
+ signatureTypeSSH
+)
+
+var (
+ // openPGPSignatureFormat is the format of an OpenPGP signature.
+ openPGPSignatureFormat = signatureFormat{
+ []byte("-----BEGIN PGP SIGNATURE-----"),
+ []byte("-----BEGIN PGP MESSAGE-----"),
+ }
+ // x509SignatureFormat is the format of an X509 signature, which is
+ // a PKCS#7 (S/MIME) signature.
+ x509SignatureFormat = signatureFormat{
+ []byte("-----BEGIN CERTIFICATE-----"),
+ }
+
+ // sshSignatureFormat is the format of an SSH signature.
+ sshSignatureFormat = signatureFormat{
+ []byte("-----BEGIN SSH SIGNATURE-----"),
+ }
+)
+
+var (
+ // knownSignatureFormats is a map of known signature formats, indexed by
+ // their signatureType.
+ knownSignatureFormats = map[signatureType]signatureFormat{
+ signatureTypeOpenPGP: openPGPSignatureFormat,
+ signatureTypeX509: x509SignatureFormat,
+ signatureTypeSSH: sshSignatureFormat,
+ }
+)
+
+// signatureType represents the type of the signature.
+type signatureType int8
+
+// signatureFormat represents the beginning of a signature.
+type signatureFormat [][]byte
+
+// typeForSignature returns the type of the signature based on its format.
+func typeForSignature(b []byte) signatureType {
+ for t, i := range knownSignatureFormats {
+ for _, begin := range i {
+ if bytes.HasPrefix(b, begin) {
+ return t
+ }
+ }
+ }
+ return signatureTypeUnknown
+}
+
+// parseSignedBytes returns the position of the last signature block found in
+// the given bytes. If no signature block is found, it returns -1.
+//
+// When multiple signature blocks are found, the position of the last one is
+// returned. Any trailing bytes after this signature block start should be
+// considered part of the signature.
+//
+// Given this, it would be safe to use the returned position to split the bytes
+// into two parts: the first part containing the message, the second part
+// containing the signature.
+//
+// Example:
+//
+// message := []byte(`Message with signature
+//
+// -----BEGIN SSH SIGNATURE-----
+// ...`)
+//
+// var signature string
+// if pos, _ := parseSignedBytes(message); pos != -1 {
+// signature = string(message[pos:])
+// message = message[:pos]
+// }
+//
+// This logic is on par with git's gpg-interface.c:parse_signed_buffer().
+// https://github.com/git/git/blob/7c2ef319c52c4997256f5807564523dfd4acdfc7/gpg-interface.c#L668
+func parseSignedBytes(b []byte) (int, signatureType) {
+ var n, match = 0, -1
+ var t signatureType
+ for n < len(b) {
+ var i = b[n:]
+ if st := typeForSignature(i); st != signatureTypeUnknown {
+ match = n
+ t = st
+ }
+ if eol := bytes.IndexByte(i, '\n'); eol >= 0 {
+ n += eol + 1
+ continue
+ }
+ // If we reach this point, we've reached the end.
+ break
+ }
+ return match, t
+}
diff --git a/plumbing/object/signature_test.go b/plumbing/object/signature_test.go
new file mode 100644
index 0000000..1bdb1d1
--- /dev/null
+++ b/plumbing/object/signature_test.go
@@ -0,0 +1,180 @@
+package object
+
+import (
+ "bytes"
+ "testing"
+)
+
+func Test_typeForSignature(t *testing.T) {
+ tests := []struct {
+ name string
+ b []byte
+ want signatureType
+ }{
+ {
+ name: "known signature format (PGP)",
+ b: []byte(`-----BEGIN PGP SIGNATURE-----
+
+iHUEABYKAB0WIQTMqU0ycQ3f6g3PMoWMmmmF4LuV8QUCYGebVwAKCRCMmmmF4LuV
+8VtyAP9LbuXAhtK6FQqOjKybBwlV70rLcXVP24ubDuz88VVwSgD+LuObsasWq6/U
+TssDKHUR2taa53bQYjkZQBpvvwOrLgc=
+=YQUf
+-----END PGP SIGNATURE-----`),
+ want: signatureTypeOpenPGP,
+ },
+ {
+ name: "known signature format (SSH)",
+ b: []byte(`-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgij/EfHS8tCjolj5uEANXgKzFfp
+0D7wOhjWVbYZH6KugAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQIYHMhSVV9L2xwJuV8eWMLjThya8yXgCHDzw3p01D19KirrabW0veiichPB5m+Ihtr
+MKEQruIQWJb+8HVXwssA4=
+-----END SSH SIGNATURE-----`),
+ want: signatureTypeSSH,
+ },
+ {
+ name: "known signature format (X509)",
+ b: []byte(`-----BEGIN CERTIFICATE-----
+MIIDZjCCAk6gAwIBAgIJALZ9Z3Z9Z3Z9MA0GCSqGSIb3DQEBCwUAMIGIMQswCQYD
+VQQGEwJTRTEOMAwGA1UECAwFVGV4YXMxDjAMBgNVBAcMBVRleGFzMQ4wDAYDVQQK
+DAVUZXhhczEOMAwGA1UECwwFVGV4YXMxGDAWBgNVBAMMD1RleGFzIENlcnRpZmlj
+YXRlMB4XDTE3MDUyNjE3MjY0MloXDTI3MDUyNDE3MjY0MlowgYgxCzAJBgNVBAYT
+AlNFMQ4wDAYDVQQIDAVUZXhhczEOMAwGA1UEBwwFVGV4YXMxDjAMBgNVBAoMBVRl
+eGFzMQ4wDAYDVQQLDAVUZXhhczEYMBYGA1UEAwwPVGV4YXMgQ2VydGlmaWNhdGUw
+ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDQZ9Z3Z9Z3Z9Z3Z9Z3Z9Z3
+-----END CERTIFICATE-----`),
+ want: signatureTypeX509,
+ },
+ {
+ name: "unknown signature format",
+ b: []byte(`-----BEGIN ARBITRARY SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgij/EfHS8tCjolj5uEANXgKzFfp
+-----END UNKNOWN SIGNATURE-----`),
+ want: signatureTypeUnknown,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := typeForSignature(tt.b); got != tt.want {
+ t.Errorf("typeForSignature() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func Test_parseSignedBytes(t *testing.T) {
+ tests := []struct {
+ name string
+ b []byte
+ wantSignature []byte
+ wantType signatureType
+ }{
+ {
+ name: "detects signature and type",
+ b: []byte(`signed tag
+-----BEGIN PGP SIGNATURE-----
+
+iQGzBAABCAAdFiEE/h5sbbqJFh9j1AdUSqtFFGopTmwFAmB5XFkACgkQSqtFFGop
+TmxvgAv+IPjX5WCLFUIMx8hquMZp1VkhQrseE7rljUYaYpga8gZ9s4kseTGhy7Un
+61U3Ro6cTPEiQF/FkAGzSdPuGqv0ARBqHDX2tUI9+Zs/K8aG8tN+JTaof0gBcTyI
+BLbZVYDTxbS9whxSDewQd0OvBG1m9ISLUhjXo6mbaVvrKXNXTHg40MPZ8ZxjR/vN
+hxXXoUVnFyEDo+v6nK56mYtapThDaQQHHzD6D3VaCq3Msog7qAh9/ZNBmgb88aQ3
+FoK8PHMyr5elsV3mE9bciZBUc+dtzjOvp94uQ5ZKUXaPusXaYXnKpVnzhyer6RBI
+gJLWtPwAinqmN41rGJ8jDAGrpPNjaRrMhGtbyVUPUf19OxuUIroe77sIIKTP0X2o
+Wgp56dYpTst0JcGv/FYCeau/4pTRDfwHAOcDiBQ/0ag9IrZp9P8P9zlKmzNPEraV
+pAe1/EFuhv2UDLucAiWM8iDZIcw8iN0OYMOGUmnk0WuGIo7dzLeqMGY+ND5n5Z8J
+sZC//k6m
+=VhHy
+-----END PGP SIGNATURE-----`),
+ wantSignature: []byte(`-----BEGIN PGP SIGNATURE-----
+
+iQGzBAABCAAdFiEE/h5sbbqJFh9j1AdUSqtFFGopTmwFAmB5XFkACgkQSqtFFGop
+TmxvgAv+IPjX5WCLFUIMx8hquMZp1VkhQrseE7rljUYaYpga8gZ9s4kseTGhy7Un
+61U3Ro6cTPEiQF/FkAGzSdPuGqv0ARBqHDX2tUI9+Zs/K8aG8tN+JTaof0gBcTyI
+BLbZVYDTxbS9whxSDewQd0OvBG1m9ISLUhjXo6mbaVvrKXNXTHg40MPZ8ZxjR/vN
+hxXXoUVnFyEDo+v6nK56mYtapThDaQQHHzD6D3VaCq3Msog7qAh9/ZNBmgb88aQ3
+FoK8PHMyr5elsV3mE9bciZBUc+dtzjOvp94uQ5ZKUXaPusXaYXnKpVnzhyer6RBI
+gJLWtPwAinqmN41rGJ8jDAGrpPNjaRrMhGtbyVUPUf19OxuUIroe77sIIKTP0X2o
+Wgp56dYpTst0JcGv/FYCeau/4pTRDfwHAOcDiBQ/0ag9IrZp9P8P9zlKmzNPEraV
+pAe1/EFuhv2UDLucAiWM8iDZIcw8iN0OYMOGUmnk0WuGIo7dzLeqMGY+ND5n5Z8J
+sZC//k6m
+=VhHy
+-----END PGP SIGNATURE-----`),
+ wantType: signatureTypeOpenPGP,
+ },
+ {
+ name: "last signature for multiple signatures",
+ b: []byte(`signed tag
+-----BEGIN PGP SIGNATURE-----
+
+iQGzBAABCAAdFiEE/h5sbbqJFh9j1AdUSqtFFGopTmwFAmB5XFkACgkQSqtFFGop
+TmxvgAv+IPjX5WCLFUIMx8hquMZp1VkhQrseE7rljUYaYpga8gZ9s4kseTGhy7Un
+61U3Ro6cTPEiQF/FkAGzSdPuGqv0ARBqHDX2tUI9+Zs/K8aG8tN+JTaof0gBcTyI
+BLbZVYDTxbS9whxSDewQd0OvBG1m9ISLUhjXo6mbaVvrKXNXTHg40MPZ8ZxjR/vN
+hxXXoUVnFyEDo+v6nK56mYtapThDaQQHHzD6D3VaCq3Msog7qAh9/ZNBmgb88aQ3
+FoK8PHMyr5elsV3mE9bciZBUc+dtzjOvp94uQ5ZKUXaPusXaYXnKpVnzhyer6RBI
+gJLWtPwAinqmN41rGJ8jDAGrpPNjaRrMhGtbyVUPUf19OxuUIroe77sIIKTP0X2o
+Wgp56dYpTst0JcGv/FYCeau/4pTRDfwHAOcDiBQ/0ag9IrZp9P8P9zlKmzNPEraV
+pAe1/EFuhv2UDLucAiWM8iDZIcw8iN0OYMOGUmnk0WuGIo7dzLeqMGY+ND5n5Z8J
+sZC//k6m
+=VhHy
+-----END PGP SIGNATURE-----
+-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgij/EfHS8tCjolj5uEANXgKzFfp
+0D7wOhjWVbYZH6KugAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQIYHMhSVV9L2xwJuV8eWMLjThya8yXgCHDzw3p01D19KirrabW0veiichPB5m+Ihtr
+MKEQruIQWJb+8HVXwssA4=
+-----END SSH SIGNATURE-----`),
+ wantSignature: []byte(`-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgij/EfHS8tCjolj5uEANXgKzFfp
+0D7wOhjWVbYZH6KugAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQIYHMhSVV9L2xwJuV8eWMLjThya8yXgCHDzw3p01D19KirrabW0veiichPB5m+Ihtr
+MKEQruIQWJb+8HVXwssA4=
+-----END SSH SIGNATURE-----`),
+ wantType: signatureTypeSSH,
+ },
+ {
+ name: "signature with trailing data",
+ b: []byte(`An invalid
+
+-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgij/EfHS8tCjolj5uEANXgKzFfp
+0D7wOhjWVbYZH6KugAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQIYHMhSVV9L2xwJuV8eWMLjThya8yXgCHDzw3p01D19KirrabW0veiichPB5m+Ihtr
+MKEQruIQWJb+8HVXwssA4=
+-----END SSH SIGNATURE-----
+
+signed tag`),
+ wantSignature: []byte(`-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgij/EfHS8tCjolj5uEANXgKzFfp
+0D7wOhjWVbYZH6KugAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQIYHMhSVV9L2xwJuV8eWMLjThya8yXgCHDzw3p01D19KirrabW0veiichPB5m+Ihtr
+MKEQruIQWJb+8HVXwssA4=
+-----END SSH SIGNATURE-----
+
+signed tag`),
+ wantType: signatureTypeSSH,
+ },
+ {
+ name: "data without signature",
+ b: []byte(`Some message`),
+ wantSignature: []byte(``),
+ wantType: signatureTypeUnknown,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ pos, st := parseSignedBytes(tt.b)
+ var signature []byte
+ if pos >= 0 {
+ signature = tt.b[pos:]
+ }
+ if !bytes.Equal(signature, tt.wantSignature) {
+ t.Errorf("parseSignedBytes() got = %s for pos = %v, want %s", signature, pos, tt.wantSignature)
+ }
+ if st != tt.wantType {
+ t.Errorf("parseSignedBytes() got1 = %v, want %v", st, tt.wantType)
+ }
+ })
+ }
+}
diff --git a/plumbing/object/tag.go b/plumbing/object/tag.go
index 216010d..cf46c08 100644
--- a/plumbing/object/tag.go
+++ b/plumbing/object/tag.go
@@ -1,18 +1,16 @@
package object
import (
- "bufio"
"bytes"
"fmt"
"io"
- stdioutil "io/ioutil"
"strings"
"github.com/ProtonMail/go-crypto/openpgp"
-
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/go-git/go-git/v5/utils/sync"
)
// Tag represents an annotated tag object. It points to a single git object of
@@ -93,9 +91,9 @@ func (t *Tag) Decode(o plumbing.EncodedObject) (err error) {
}
defer ioutil.CheckClose(reader, &err)
- r := bufPool.Get().(*bufio.Reader)
- defer bufPool.Put(r)
- r.Reset(reader)
+ r := sync.GetBufioReader(reader)
+ defer sync.PutBufioReader(r)
+
for {
var line []byte
line, err = r.ReadBytes('\n')
@@ -128,40 +126,15 @@ func (t *Tag) Decode(o plumbing.EncodedObject) (err error) {
}
}
- data, err := stdioutil.ReadAll(r)
+ data, err := io.ReadAll(r)
if err != nil {
return err
}
-
- var pgpsig bool
- // Check if data contains PGP signature.
- if bytes.Contains(data, []byte(beginpgp)) {
- // Split the lines at newline.
- messageAndSig := bytes.Split(data, []byte("\n"))
-
- for _, l := range messageAndSig {
- if pgpsig {
- if bytes.Contains(l, []byte(endpgp)) {
- t.PGPSignature += endpgp + "\n"
- break
- } else {
- t.PGPSignature += string(l) + "\n"
- }
- continue
- }
-
- // Check if it's the beginning of a PGP signature.
- if bytes.Contains(l, []byte(beginpgp)) {
- t.PGPSignature += beginpgp + "\n"
- pgpsig = true
- continue
- }
-
- t.Message += string(l) + "\n"
- }
- } else {
- t.Message = string(data)
+ if sm, _ := parseSignedBytes(data); sm >= 0 {
+ t.PGPSignature = string(data[sm:])
+ data = data[:sm]
}
+ t.Message = string(data)
return nil
}
diff --git a/plumbing/object/tag_test.go b/plumbing/object/tag_test.go
index cd1d15d..d374c6c 100644
--- a/plumbing/object/tag_test.go
+++ b/plumbing/object/tag_test.go
@@ -3,7 +3,6 @@ package object
import (
"fmt"
"io"
- "io/ioutil"
"strings"
"time"
@@ -312,6 +311,27 @@ RUysgqjcpT8+iQM1PblGfHR4XAhuOqN5Fx06PSaFZhqvWFezJ28/CLyX5q+oIVk=
c.Assert(decoded.PGPSignature, Equals, pgpsignature)
}
+func (s *TagSuite) TestSSHSignatureSerialization(c *C) {
+ encoded := &plumbing.MemoryObject{}
+ decoded := &Tag{}
+ tag := s.tag(c, plumbing.NewHash("b742a2a9fa0afcfa9a6fad080980fbc26b007c69"))
+
+ signature := `-----BEGIN SSH SIGNATURE-----
+U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgij/EfHS8tCjolj5uEANXgKzFfp
+0D7wOhjWVbYZH6KugAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
+AAAAQIYHMhSVV9L2xwJuV8eWMLjThya8yXgCHDzw3p01D19KirrabW0veiichPB5m+Ihtr
+MKEQruIQWJb+8HVXwssA4=
+-----END SSH SIGNATURE-----`
+ tag.PGPSignature = signature
+
+ err := tag.Encode(encoded)
+ c.Assert(err, IsNil)
+
+ err = decoded.Decode(encoded)
+ c.Assert(err, IsNil)
+ c.Assert(decoded.PGPSignature, Equals, signature)
+}
+
func (s *TagSuite) TestVerify(c *C) {
ts := time.Unix(1617403017, 0)
loc, _ := time.LoadLocation("UTC")
@@ -445,7 +465,7 @@ func (s *TagSuite) TestEncodeWithoutSignature(c *C) {
c.Assert(err, IsNil)
er, err := encoded.Reader()
c.Assert(err, IsNil)
- payload, err := ioutil.ReadAll(er)
+ payload, err := io.ReadAll(er)
c.Assert(err, IsNil)
c.Assert(string(payload), Equals, ""+
diff --git a/plumbing/object/tree.go b/plumbing/object/tree.go
index 5e6378c..e9f7666 100644
--- a/plumbing/object/tree.go
+++ b/plumbing/object/tree.go
@@ -1,7 +1,6 @@
package object
import (
- "bufio"
"context"
"errors"
"fmt"
@@ -14,6 +13,7 @@ import (
"github.com/go-git/go-git/v5/plumbing/filemode"
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/go-git/go-git/v5/utils/sync"
)
const (
@@ -230,9 +230,9 @@ func (t *Tree) Decode(o plumbing.EncodedObject) (err error) {
}
defer ioutil.CheckClose(reader, &err)
- r := bufPool.Get().(*bufio.Reader)
- defer bufPool.Put(r)
- r.Reset(reader)
+ r := sync.GetBufioReader(reader)
+ defer sync.PutBufioReader(r)
+
for {
str, err := r.ReadString(' ')
if err != nil {
diff --git a/plumbing/protocol/packp/advrefs.go b/plumbing/protocol/packp/advrefs.go
index 1bd724c..f93ad30 100644
--- a/plumbing/protocol/packp/advrefs.go
+++ b/plumbing/protocol/packp/advrefs.go
@@ -57,7 +57,7 @@ func (a *AdvRefs) AddReference(r *plumbing.Reference) error {
switch r.Type() {
case plumbing.SymbolicReference:
v := fmt.Sprintf("%s:%s", r.Name().String(), r.Target().String())
- a.Capabilities.Add(capability.SymRef, v)
+ return a.Capabilities.Add(capability.SymRef, v)
case plumbing.HashReference:
a.References[r.Name().String()] = r.Hash()
default:
@@ -96,12 +96,12 @@ func (a *AdvRefs) addRefs(s storer.ReferenceStorer) error {
//
// Git versions prior to 1.8.4.3 have a special procedure to get
// the reference that HEAD is pointing to:
-// - Check if a reference called master exists. If exists and it
-// has the same hash as HEAD hash, we can say that HEAD is pointing to master
-// - If master does not exists or does not have the same hash as HEAD,
-// order references and check in that order if that reference has the same
-// hash than HEAD. If yes, set HEAD pointing to that branch hash
-// - If no reference is found, throw an error
+// - Check if a reference called master exists. If it exists and has
+// the same hash as HEAD, we can say that HEAD is pointing to master
+// - If master does not exist or does not have the same hash as HEAD,
+// order the references and check, in that order, if a reference has the same
+// hash as HEAD. If yes, set HEAD pointing to that branch hash
+// - If no reference is found, return an error
func (a *AdvRefs) resolveHead(s storer.ReferenceStorer) error {
if a.Head == nil {
return nil
diff --git a/plumbing/protocol/packp/advrefs_decode.go b/plumbing/protocol/packp/advrefs_decode.go
index 63bbe5a..f8d26a2 100644
--- a/plumbing/protocol/packp/advrefs_decode.go
+++ b/plumbing/protocol/packp/advrefs_decode.go
@@ -133,6 +133,7 @@ func decodeFirstHash(p *advRefsDecoder) decoderStateFn {
return nil
}
+ // TODO: Use object-format (when available) for hash size. Git 2.41+
if len(p.line) < hashSize {
p.error("cannot read hash, pkt-line too short")
return nil
diff --git a/plumbing/protocol/packp/advrefs_decode_test.go b/plumbing/protocol/packp/advrefs_decode_test.go
index 83b0b01..d127145 100644
--- a/plumbing/protocol/packp/advrefs_decode_test.go
+++ b/plumbing/protocol/packp/advrefs_decode_test.go
@@ -218,6 +218,16 @@ func (s *AdvRefsDecodeSuite) TestCaps(c *C) {
{Name: capability.SymRef, Values: []string{"HEAD:refs/heads/master"}},
{Name: capability.Agent, Values: []string{"foo=bar"}},
},
+ }, {
+ input: []string{
+ "0000000000000000000000000000000000000000 capabilities^{}\x00report-status report-status-v2 delete-refs side-band-64k quiet atomic ofs-delta object-format=sha1 agent=git/2.41.0\n",
+ pktline.FlushString,
+ },
+ capabilities: []entry{
+ {Name: capability.ReportStatus, Values: []string(nil)},
+ {Name: capability.ObjectFormat, Values: []string{"sha1"}},
+ {Name: capability.Agent, Values: []string{"git/2.41.0"}},
+ },
}} {
ar := s.testDecodeOK(c, test.input)
for _, fixCap := range test.capabilities {
diff --git a/plumbing/protocol/packp/capability/capability.go b/plumbing/protocol/packp/capability/capability.go
index 8d6a56f..b52e8a4 100644
--- a/plumbing/protocol/packp/capability/capability.go
+++ b/plumbing/protocol/packp/capability/capability.go
@@ -1,6 +1,11 @@
// Package capability defines the server and client capabilities.
package capability
+import (
+ "fmt"
+ "os"
+)
+
// Capability describes a server or client capability.
type Capability string
@@ -238,7 +243,15 @@ const (
Filter Capability = "filter"
)
-const DefaultAgent = "go-git/4.x"
+const userAgent = "go-git/5.x"
+
+// DefaultAgent provides the user agent string.
+func DefaultAgent() string {
+ if envUserAgent, ok := os.LookupEnv("GO_GIT_USER_AGENT_EXTRA"); ok {
+ return fmt.Sprintf("%s %s", userAgent, envUserAgent)
+ }
+ return userAgent
+}
var known = map[Capability]bool{
MultiACK: true, MultiACKDetailed: true, NoDone: true, ThinPack: true,
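A brief sketch of how the GO_GIT_USER_AGENT_EXTRA variable introduced above could be used; the suffix value and helper name are illustrative:

package example

import (
	"fmt"
	"os"

	"github.com/go-git/go-git/v5/plumbing/protocol/packp/capability"
)

func printAgent() {
	// With the extra variable set, the advertised agent becomes
	// "go-git/5.x ci-runner/1.0"; without it, just "go-git/5.x".
	os.Setenv("GO_GIT_USER_AGENT_EXTRA", "ci-runner/1.0")
	fmt.Println(capability.DefaultAgent())
}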
diff --git a/plumbing/protocol/packp/capability/capability_test.go b/plumbing/protocol/packp/capability/capability_test.go
new file mode 100644
index 0000000..f1fd028
--- /dev/null
+++ b/plumbing/protocol/packp/capability/capability_test.go
@@ -0,0 +1,22 @@
+package capability
+
+import (
+ "fmt"
+ "os"
+
+ check "gopkg.in/check.v1"
+)
+
+var _ = check.Suite(&SuiteCapabilities{})
+
+func (s *SuiteCapabilities) TestDefaultAgent(c *check.C) {
+ os.Unsetenv("GO_GIT_USER_AGENT_EXTRA")
+ ua := DefaultAgent()
+ c.Assert(ua, check.Equals, userAgent)
+}
+
+func (s *SuiteCapabilities) TestEnvAgent(c *check.C) {
+ os.Setenv("GO_GIT_USER_AGENT_EXTRA", "abc xyz")
+ ua := DefaultAgent()
+ c.Assert(ua, check.Equals, fmt.Sprintf("%s %s", userAgent, "abc xyz"))
+}
diff --git a/plumbing/protocol/packp/capability/list.go b/plumbing/protocol/packp/capability/list.go
index f41ec79..553d81c 100644
--- a/plumbing/protocol/packp/capability/list.go
+++ b/plumbing/protocol/packp/capability/list.go
@@ -86,7 +86,9 @@ func (l *List) Get(capability Capability) []string {
// Set sets a capability removing the previous values
func (l *List) Set(capability Capability, values ...string) error {
- delete(l.m, capability)
+ if _, ok := l.m[capability]; ok {
+ l.m[capability].Values = l.m[capability].Values[:0]
+ }
return l.Add(capability, values...)
}
diff --git a/plumbing/protocol/packp/capability/list_test.go b/plumbing/protocol/packp/capability/list_test.go
index 61b0b13..71181cb 100644
--- a/plumbing/protocol/packp/capability/list_test.go
+++ b/plumbing/protocol/packp/capability/list_test.go
@@ -122,6 +122,17 @@ func (s *SuiteCapabilities) TestSetEmpty(c *check.C) {
c.Assert(cap.Get(Agent), check.HasLen, 1)
}
+func (s *SuiteCapabilities) TestSetDuplicate(c *check.C) {
+ cap := NewList()
+ err := cap.Set(Agent, "baz")
+ c.Assert(err, check.IsNil)
+
+ err = cap.Set(Agent, "bar")
+ c.Assert(err, check.IsNil)
+
+ c.Assert(cap.String(), check.Equals, "agent=bar")
+}
+
func (s *SuiteCapabilities) TestGetEmpty(c *check.C) {
cap := NewList()
c.Assert(cap.Get(Agent), check.HasLen, 0)
diff --git a/plumbing/protocol/packp/sideband/demux_test.go b/plumbing/protocol/packp/sideband/demux_test.go
index 6cda703..8f23353 100644
--- a/plumbing/protocol/packp/sideband/demux_test.go
+++ b/plumbing/protocol/packp/sideband/demux_test.go
@@ -4,7 +4,6 @@ import (
"bytes"
"errors"
"io"
- "io/ioutil"
"testing"
"github.com/go-git/go-git/v5/plumbing/format/pktline"
@@ -101,7 +100,7 @@ func (s *SidebandSuite) TestDecodeWithProgress(c *C) {
c.Assert(n, Equals, 26)
c.Assert(content, DeepEquals, expected)
- progress, err := ioutil.ReadAll(output)
+ progress, err := io.ReadAll(output)
c.Assert(err, IsNil)
c.Assert(progress, DeepEquals, []byte{'F', 'O', 'O', '\n'})
}
diff --git a/plumbing/protocol/packp/srvresp.go b/plumbing/protocol/packp/srvresp.go
index b3a7ee8..8cd0a72 100644
--- a/plumbing/protocol/packp/srvresp.go
+++ b/plumbing/protocol/packp/srvresp.go
@@ -21,11 +21,6 @@ type ServerResponse struct {
// Decode decodes the response into the struct, isMultiACK should be true, if
// the request was done with multi_ack or multi_ack_detailed capabilities.
func (r *ServerResponse) Decode(reader *bufio.Reader, isMultiACK bool) error {
- // TODO: implement support for multi_ack or multi_ack_detailed responses
- if isMultiACK {
- return errors.New("multi_ack and multi_ack_detailed are not supported")
- }
-
s := pktline.NewScanner(reader)
for s.Scan() {
@@ -48,7 +43,23 @@ func (r *ServerResponse) Decode(reader *bufio.Reader, isMultiACK bool) error {
}
}
- return s.Err()
+ // isMultiACK is true when the remote server advertises the related
+ // capabilities and they are not in transport.UnsupportedCapabilities.
+ //
+ // Users may decide to remove multi_ack and multi_ack_detailed from the
+ // unsupported capabilities list, which allows them to do initial clones
+ // from Azure DevOps.
+ //
+ // Follow-up fetches may fail, therefore errors are wrapped with additional
+ // information highlighting that these capabilities are not supported by go-git.
+ //
+ // TODO: Implement support for multi_ack or multi_ack_detailed responses.
+ err := s.Err()
+ if err != nil && isMultiACK {
+ return fmt.Errorf("multi_ack and multi_ack_detailed are not supported: %w", err)
+ }
+
+ return err
}
// stopReading detects when a valid command such as ACK or NAK is found to be
@@ -113,8 +124,9 @@ func (r *ServerResponse) decodeACKLine(line []byte) error {
}
// Encode encodes the ServerResponse into a writer.
-func (r *ServerResponse) Encode(w io.Writer) error {
- if len(r.ACKs) > 1 {
+func (r *ServerResponse) Encode(w io.Writer, isMultiACK bool) error {
+ if len(r.ACKs) > 1 && !isMultiACK {
+ // For further information, refer to comments in the Decode func above.
return errors.New("multi_ack and multi_ack_detailed are not supported")
}
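For reference, a hedged sketch of the updated Encode signature in use; the hash value is illustrative:

package example

import (
	"bytes"

	"github.com/go-git/go-git/v5/plumbing"
	"github.com/go-git/go-git/v5/plumbing/protocol/packp"
)

func encodeACKs() ([]byte, error) {
	sr := &packp.ServerResponse{ACKs: []plumbing.Hash{
		plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"),
	}}
	var buf bytes.Buffer
	// The second argument reflects whether multi_ack/multi_ack_detailed was
	// negotiated; with a single ACK it can safely be false.
	err := sr.Encode(&buf, false)
	return buf.Bytes(), err
}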
diff --git a/plumbing/protocol/packp/srvresp_test.go b/plumbing/protocol/packp/srvresp_test.go
index 02fab42..aa0af52 100644
--- a/plumbing/protocol/packp/srvresp_test.go
+++ b/plumbing/protocol/packp/srvresp_test.go
@@ -72,8 +72,21 @@ func (s *ServerResponseSuite) TestDecodeMalformed(c *C) {
c.Assert(err, NotNil)
}
+// multi_ack isn't fully implemented; this ensures that Decode tolerates that,
+// as in some circumstances that is an acceptable assumption.
+//
+// TODO: Review as part of multi_ack implementation.
func (s *ServerResponseSuite) TestDecodeMultiACK(c *C) {
+ raw := "" +
+ "0031ACK 1111111111111111111111111111111111111111\n" +
+ "0031ACK 6ecf0ef2c2dffb796033e5a02219af86ec6584e5\n" +
+ "00080PACK\n"
+
sr := &ServerResponse{}
- err := sr.Decode(bufio.NewReader(bytes.NewBuffer(nil)), true)
- c.Assert(err, NotNil)
+ err := sr.Decode(bufio.NewReader(bytes.NewBufferString(raw)), true)
+ c.Assert(err, IsNil)
+
+ c.Assert(sr.ACKs, HasLen, 2)
+ c.Assert(sr.ACKs[0], Equals, plumbing.NewHash("1111111111111111111111111111111111111111"))
+ c.Assert(sr.ACKs[1], Equals, plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
}
diff --git a/plumbing/protocol/packp/ulreq.go b/plumbing/protocol/packp/ulreq.go
index ddec06e..344f8c7 100644
--- a/plumbing/protocol/packp/ulreq.go
+++ b/plumbing/protocol/packp/ulreq.go
@@ -95,7 +95,7 @@ func NewUploadRequestFromCapabilities(adv *capability.List) *UploadRequest {
}
if adv.Supports(capability.Agent) {
- r.Capabilities.Set(capability.Agent, capability.DefaultAgent)
+ r.Capabilities.Set(capability.Agent, capability.DefaultAgent())
}
return r
diff --git a/plumbing/protocol/packp/ulreq_decode_test.go b/plumbing/protocol/packp/ulreq_decode_test.go
index 9628f0f..efcc7b4 100644
--- a/plumbing/protocol/packp/ulreq_decode_test.go
+++ b/plumbing/protocol/packp/ulreq_decode_test.go
@@ -8,6 +8,7 @@ import (
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/format/pktline"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/plumbing/protocol/packp/capability"
. "gopkg.in/check.v1"
@@ -119,8 +120,8 @@ type byHash []plumbing.Hash
func (a byHash) Len() int { return len(a) }
func (a byHash) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a byHash) Less(i, j int) bool {
- ii := [20]byte(a[i])
- jj := [20]byte(a[j])
+ ii := [hash.Size]byte(a[i])
+ jj := [hash.Size]byte(a[j])
return bytes.Compare(ii[:], jj[:]) < 0
}
diff --git a/plumbing/protocol/packp/ulreq_test.go b/plumbing/protocol/packp/ulreq_test.go
index 0b3b616..2797a4e 100644
--- a/plumbing/protocol/packp/ulreq_test.go
+++ b/plumbing/protocol/packp/ulreq_test.go
@@ -25,7 +25,7 @@ func (s *UlReqSuite) TestNewUploadRequestFromCapabilities(c *C) {
r := NewUploadRequestFromCapabilities(cap)
c.Assert(r.Capabilities.String(), Equals,
- "multi_ack_detailed side-band-64k thin-pack ofs-delta agent=go-git/4.x",
+ "multi_ack_detailed side-band-64k thin-pack ofs-delta agent=go-git/5.x",
)
}
diff --git a/plumbing/protocol/packp/updreq.go b/plumbing/protocol/packp/updreq.go
index 5dbd8ac..8f39b39 100644
--- a/plumbing/protocol/packp/updreq.go
+++ b/plumbing/protocol/packp/updreq.go
@@ -59,7 +59,7 @@ func NewReferenceUpdateRequestFromCapabilities(adv *capability.List) *ReferenceU
r := NewReferenceUpdateRequest()
if adv.Supports(capability.Agent) {
- r.Capabilities.Set(capability.Agent, capability.DefaultAgent)
+ r.Capabilities.Set(capability.Agent, capability.DefaultAgent())
}
if adv.Supports(capability.ReportStatus) {
diff --git a/plumbing/protocol/packp/updreq_decode.go b/plumbing/protocol/packp/updreq_decode.go
index 2c9843a..076de54 100644
--- a/plumbing/protocol/packp/updreq_decode.go
+++ b/plumbing/protocol/packp/updreq_decode.go
@@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/format/pktline"
@@ -81,7 +80,7 @@ func (req *ReferenceUpdateRequest) Decode(r io.Reader) error {
var ok bool
rc, ok = r.(io.ReadCloser)
if !ok {
- rc = ioutil.NopCloser(r)
+ rc = io.NopCloser(r)
}
d := &updReqDecoder{r: rc, s: pktline.NewScanner(r)}
diff --git a/plumbing/protocol/packp/updreq_decode_test.go b/plumbing/protocol/packp/updreq_decode_test.go
index 2630112..bdcbdf5 100644
--- a/plumbing/protocol/packp/updreq_decode_test.go
+++ b/plumbing/protocol/packp/updreq_decode_test.go
@@ -3,7 +3,6 @@ package packp
import (
"bytes"
"io"
- "io/ioutil"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/format/pktline"
@@ -157,7 +156,7 @@ func (s *UpdReqDecodeSuite) TestOneUpdateCommand(c *C) {
expected.Commands = []*Command{
{Name: name, Old: hash1, New: hash2},
}
- expected.Packfile = ioutil.NopCloser(bytes.NewReader([]byte{}))
+ expected.Packfile = io.NopCloser(bytes.NewReader([]byte{}))
payloads := []string{
"1ecf0ef2c2dffb796033e5a02219af86ec6584e5 2ecf0ef2c2dffb796033e5a02219af86ec6584e5 myref\x00",
@@ -177,7 +176,7 @@ func (s *UpdReqDecodeSuite) TestMultipleCommands(c *C) {
{Name: plumbing.ReferenceName("myref2"), Old: plumbing.ZeroHash, New: hash2},
{Name: plumbing.ReferenceName("myref3"), Old: hash1, New: plumbing.ZeroHash},
}
- expected.Packfile = ioutil.NopCloser(bytes.NewReader([]byte{}))
+ expected.Packfile = io.NopCloser(bytes.NewReader([]byte{}))
payloads := []string{
"1ecf0ef2c2dffb796033e5a02219af86ec6584e5 2ecf0ef2c2dffb796033e5a02219af86ec6584e5 myref1\x00",
@@ -200,7 +199,7 @@ func (s *UpdReqDecodeSuite) TestMultipleCommandsAndCapabilities(c *C) {
{Name: plumbing.ReferenceName("myref3"), Old: hash1, New: plumbing.ZeroHash},
}
expected.Capabilities.Add("shallow")
- expected.Packfile = ioutil.NopCloser(bytes.NewReader([]byte{}))
+ expected.Packfile = io.NopCloser(bytes.NewReader([]byte{}))
payloads := []string{
"1ecf0ef2c2dffb796033e5a02219af86ec6584e5 2ecf0ef2c2dffb796033e5a02219af86ec6584e5 myref1\x00shallow",
@@ -224,7 +223,7 @@ func (s *UpdReqDecodeSuite) TestMultipleCommandsAndCapabilitiesShallow(c *C) {
}
expected.Capabilities.Add("shallow")
expected.Shallow = &hash1
- expected.Packfile = ioutil.NopCloser(bytes.NewReader([]byte{}))
+ expected.Packfile = io.NopCloser(bytes.NewReader([]byte{}))
payloads := []string{
"shallow 1ecf0ef2c2dffb796033e5a02219af86ec6584e5",
@@ -247,7 +246,7 @@ func (s *UpdReqDecodeSuite) TestWithPackfile(c *C) {
{Name: name, Old: hash1, New: hash2},
}
packfileContent := []byte("PACKabc")
- expected.Packfile = ioutil.NopCloser(bytes.NewReader(packfileContent))
+ expected.Packfile = io.NopCloser(bytes.NewReader(packfileContent))
payloads := []string{
"1ecf0ef2c2dffb796033e5a02219af86ec6584e5 2ecf0ef2c2dffb796033e5a02219af86ec6584e5 myref\x00",
@@ -298,10 +297,10 @@ func (s *UpdReqDecodeSuite) testDecodeOkExpected(c *C, expected *ReferenceUpdate
}
func (s *UpdReqDecodeSuite) compareReaders(c *C, a io.ReadCloser, b io.ReadCloser) {
- pba, err := ioutil.ReadAll(a)
+ pba, err := io.ReadAll(a)
c.Assert(err, IsNil)
c.Assert(a.Close(), IsNil)
- pbb, err := ioutil.ReadAll(b)
+ pbb, err := io.ReadAll(b)
c.Assert(err, IsNil)
c.Assert(b.Close(), IsNil)
c.Assert(pba, DeepEquals, pbb)
diff --git a/plumbing/protocol/packp/updreq_encode_test.go b/plumbing/protocol/packp/updreq_encode_test.go
index 4370b79..97868bd 100644
--- a/plumbing/protocol/packp/updreq_encode_test.go
+++ b/plumbing/protocol/packp/updreq_encode_test.go
@@ -2,13 +2,12 @@ package packp
import (
"bytes"
+ "io"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/format/pktline"
"github.com/go-git/go-git/v5/plumbing/protocol/packp/capability"
- "io/ioutil"
-
. "gopkg.in/check.v1"
)
@@ -128,7 +127,7 @@ func (s *UpdReqEncodeSuite) TestWithPackfile(c *C) {
packfileContent := []byte("PACKabc")
packfileReader := bytes.NewReader(packfileContent)
- packfileReadCloser := ioutil.NopCloser(packfileReader)
+ packfileReadCloser := io.NopCloser(packfileReader)
r := NewReferenceUpdateRequest()
r.Commands = []*Command{
diff --git a/plumbing/protocol/packp/updreq_test.go b/plumbing/protocol/packp/updreq_test.go
index c4ccbaf..80e03fb 100644
--- a/plumbing/protocol/packp/updreq_test.go
+++ b/plumbing/protocol/packp/updreq_test.go
@@ -23,14 +23,14 @@ func (s *UpdReqSuite) TestNewReferenceUpdateRequestFromCapabilities(c *C) {
r := NewReferenceUpdateRequestFromCapabilities(cap)
c.Assert(r.Capabilities.String(), Equals,
- "agent=go-git/4.x report-status",
+ "agent=go-git/5.x report-status",
)
cap = capability.NewList()
cap.Set(capability.Agent, "foo")
r = NewReferenceUpdateRequestFromCapabilities(cap)
- c.Assert(r.Capabilities.String(), Equals, "agent=go-git/4.x")
+ c.Assert(r.Capabilities.String(), Equals, "agent=go-git/5.x")
cap = capability.NewList()
diff --git a/plumbing/protocol/packp/uppackreq.go b/plumbing/protocol/packp/uppackreq.go
index de2206b..48f4438 100644
--- a/plumbing/protocol/packp/uppackreq.go
+++ b/plumbing/protocol/packp/uppackreq.go
@@ -38,10 +38,10 @@ func NewUploadPackRequestFromCapabilities(adv *capability.List) *UploadPackReque
}
}
-// IsEmpty a request if empty if Haves are contained in the Wants, or if Wants
-// length is zero
+// IsEmpty returns whether a request is empty. A request is empty if the Haves
+// are contained in the Wants, or if the Wants length is zero and there are no Shallows.
func (r *UploadPackRequest) IsEmpty() bool {
- return isSubset(r.Wants, r.Haves)
+ return isSubset(r.Wants, r.Haves) && len(r.Shallows) == 0
}
func isSubset(needle []plumbing.Hash, haystack []plumbing.Hash) bool {
diff --git a/plumbing/protocol/packp/uppackreq_test.go b/plumbing/protocol/packp/uppackreq_test.go
index f723e3c..ad38565 100644
--- a/plumbing/protocol/packp/uppackreq_test.go
+++ b/plumbing/protocol/packp/uppackreq_test.go
@@ -18,7 +18,7 @@ func (s *UploadPackRequestSuite) TestNewUploadPackRequestFromCapabilities(c *C)
cap.Set(capability.Agent, "foo")
r := NewUploadPackRequestFromCapabilities(cap)
- c.Assert(r.Capabilities.String(), Equals, "agent=go-git/4.x")
+ c.Assert(r.Capabilities.String(), Equals, "agent=go-git/5.x")
}
func (s *UploadPackRequestSuite) TestIsEmpty(c *C) {
@@ -41,6 +41,13 @@ func (s *UploadPackRequestSuite) TestIsEmpty(c *C) {
r.Haves = append(r.Haves, plumbing.NewHash("d82f291cde9987322c8a0c81a325e1ba6159684c"))
c.Assert(r.IsEmpty(), Equals, true)
+
+ r = NewUploadPackRequest()
+ r.Wants = append(r.Wants, plumbing.NewHash("d82f291cde9987322c8a0c81a325e1ba6159684c"))
+ r.Haves = append(r.Haves, plumbing.NewHash("d82f291cde9987322c8a0c81a325e1ba6159684c"))
+ r.Shallows = append(r.Shallows, plumbing.NewHash("2b41ef280fdb67a9b250678686a0c3e03b0a9989"))
+
+ c.Assert(r.IsEmpty(), Equals, false)
}
type UploadHavesSuite struct{}
diff --git a/plumbing/protocol/packp/uppackresp.go b/plumbing/protocol/packp/uppackresp.go
index 26ae61e..a485cb7 100644
--- a/plumbing/protocol/packp/uppackresp.go
+++ b/plumbing/protocol/packp/uppackresp.go
@@ -78,7 +78,7 @@ func (r *UploadPackResponse) Encode(w io.Writer) (err error) {
}
}
- if err := r.ServerResponse.Encode(w); err != nil {
+ if err := r.ServerResponse.Encode(w, r.isMultiACK); err != nil {
return err
}
diff --git a/plumbing/protocol/packp/uppackresp_test.go b/plumbing/protocol/packp/uppackresp_test.go
index 260dc57..8fbf924 100644
--- a/plumbing/protocol/packp/uppackresp_test.go
+++ b/plumbing/protocol/packp/uppackresp_test.go
@@ -2,7 +2,7 @@ package packp
import (
"bytes"
- "io/ioutil"
+ "io"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/protocol/packp/capability"
@@ -21,10 +21,10 @@ func (s *UploadPackResponseSuite) TestDecodeNAK(c *C) {
res := NewUploadPackResponse(req)
defer res.Close()
- err := res.Decode(ioutil.NopCloser(bytes.NewBufferString(raw)))
+ err := res.Decode(io.NopCloser(bytes.NewBufferString(raw)))
c.Assert(err, IsNil)
- pack, err := ioutil.ReadAll(res)
+ pack, err := io.ReadAll(res)
c.Assert(err, IsNil)
c.Assert(pack, DeepEquals, []byte("PACK"))
}
@@ -38,10 +38,10 @@ func (s *UploadPackResponseSuite) TestDecodeDepth(c *C) {
res := NewUploadPackResponse(req)
defer res.Close()
- err := res.Decode(ioutil.NopCloser(bytes.NewBufferString(raw)))
+ err := res.Decode(io.NopCloser(bytes.NewBufferString(raw)))
c.Assert(err, IsNil)
- pack, err := ioutil.ReadAll(res)
+ pack, err := io.ReadAll(res)
c.Assert(err, IsNil)
c.Assert(pack, DeepEquals, []byte("PACK"))
}
@@ -55,10 +55,14 @@ func (s *UploadPackResponseSuite) TestDecodeMalformed(c *C) {
res := NewUploadPackResponse(req)
defer res.Close()
- err := res.Decode(ioutil.NopCloser(bytes.NewBufferString(raw)))
+ err := res.Decode(io.NopCloser(bytes.NewBufferString(raw)))
c.Assert(err, NotNil)
}
+// multi_ack isn't fully implemented; this ensures that Decode tolerates that,
+// as in some circumstances that is an acceptable assumption.
+//
+// TODO: Review as part of multi_ack implementation.
func (s *UploadPackResponseSuite) TestDecodeMultiACK(c *C) {
req := NewUploadPackRequest()
req.Capabilities.Set(capability.MultiACK)
@@ -66,8 +70,8 @@ func (s *UploadPackResponseSuite) TestDecodeMultiACK(c *C) {
res := NewUploadPackResponse(req)
defer res.Close()
- err := res.Decode(ioutil.NopCloser(bytes.NewBuffer(nil)))
- c.Assert(err, NotNil)
+ err := res.Decode(io.NopCloser(bytes.NewBuffer(nil)))
+ c.Assert(err, IsNil)
}
func (s *UploadPackResponseSuite) TestReadNoDecode(c *C) {
@@ -83,7 +87,7 @@ func (s *UploadPackResponseSuite) TestReadNoDecode(c *C) {
}
func (s *UploadPackResponseSuite) TestEncodeNAK(c *C) {
- pf := ioutil.NopCloser(bytes.NewBuffer([]byte("[PACK]")))
+ pf := io.NopCloser(bytes.NewBuffer([]byte("[PACK]")))
req := NewUploadPackRequest()
res := NewUploadPackResponseWithPackfile(req, pf)
defer func() { c.Assert(res.Close(), IsNil) }()
@@ -96,7 +100,7 @@ func (s *UploadPackResponseSuite) TestEncodeNAK(c *C) {
}
func (s *UploadPackResponseSuite) TestEncodeDepth(c *C) {
- pf := ioutil.NopCloser(bytes.NewBuffer([]byte("PACK")))
+ pf := io.NopCloser(bytes.NewBuffer([]byte("PACK")))
req := NewUploadPackRequest()
req.Depth = DepthCommits(1)
@@ -111,7 +115,7 @@ func (s *UploadPackResponseSuite) TestEncodeDepth(c *C) {
}
func (s *UploadPackResponseSuite) TestEncodeMultiACK(c *C) {
- pf := ioutil.NopCloser(bytes.NewBuffer([]byte("[PACK]")))
+ pf := io.NopCloser(bytes.NewBuffer([]byte("[PACK]")))
req := NewUploadPackRequest()
res := NewUploadPackResponseWithPackfile(req, pf)
diff --git a/plumbing/reference.go b/plumbing/reference.go
index 08e908f..5a67f69 100644
--- a/plumbing/reference.go
+++ b/plumbing/reference.go
@@ -15,10 +15,11 @@ const (
symrefPrefix = "ref: "
)
-// RefRevParseRules are a set of rules to parse references into short names.
-// These are the same rules as used by git in shorten_unambiguous_ref.
+// RefRevParseRules are a set of rules to parse references into short names, or to expand them into a full reference.
+// These are the same rules as used by git in shorten_unambiguous_ref and expand_ref.
// See: https://github.com/git/git/blob/e0aaa1b6532cfce93d87af9bc813fb2e7a7ce9d7/refs.c#L417
var RefRevParseRules = []string{
+ "%s",
"refs/%s",
"refs/tags/%s",
"refs/heads/%s",
@@ -113,7 +114,7 @@ func (r ReferenceName) String() string {
func (r ReferenceName) Short() string {
s := string(r)
res := s
- for _, format := range RefRevParseRules {
+ for _, format := range RefRevParseRules[1:] {
_, err := fmt.Sscanf(s, format, &res)
if err == nil {
continue
@@ -126,6 +127,7 @@ func (r ReferenceName) Short() string {
const (
HEAD ReferenceName = "HEAD"
Master ReferenceName = "refs/heads/master"
+ Main ReferenceName = "refs/heads/main"
)
// Reference is a representation of git reference
@@ -168,22 +170,22 @@ func NewHashReference(n ReferenceName, h Hash) *Reference {
}
}
-// Type return the type of a reference
+// Type returns the type of a reference
func (r *Reference) Type() ReferenceType {
return r.t
}
-// Name return the name of a reference
+// Name returns the name of a reference
func (r *Reference) Name() ReferenceName {
return r.n
}
-// Hash return the hash of a hash reference
+// Hash returns the hash of a hash reference
func (r *Reference) Hash() Hash {
return r.h
}
-// Target return the target of a symbolic reference
+// Target returns the target of a symbolic reference
func (r *Reference) Target() ReferenceName {
return r.target
}
@@ -204,6 +206,21 @@ func (r *Reference) Strings() [2]string {
}
func (r *Reference) String() string {
- s := r.Strings()
- return fmt.Sprintf("%s %s", s[1], s[0])
+ ref := ""
+ switch r.Type() {
+ case HashReference:
+ ref = r.Hash().String()
+ case SymbolicReference:
+ ref = symrefPrefix + r.Target().String()
+ default:
+ return ""
+ }
+
+ name := r.Name().String()
+ var v strings.Builder
+ v.Grow(len(ref) + len(name) + 1)
+ v.WriteString(ref)
+ v.WriteString(" ")
+ v.WriteString(name)
+ return v.String()
}
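A short sketch of the reference formatting after this change; the hash below is illustrative. A hash reference renders as "<hash> <name>" and a symbolic reference as "ref: <target> <name>":

package example

import (
	"fmt"

	"github.com/go-git/go-git/v5/plumbing"
)

func printRefs() {
	hashRef := plumbing.NewHashReference(plumbing.Main,
		plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5"))
	symRef := plumbing.NewSymbolicReference(plumbing.HEAD, plumbing.Main)

	fmt.Println(hashRef.String()) // 6ecf0ef2... refs/heads/main
	fmt.Println(symRef.String())  // ref: refs/heads/main HEAD
}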
diff --git a/plumbing/reference_test.go b/plumbing/reference_test.go
index b3ccf53..04dfef9 100644
--- a/plumbing/reference_test.go
+++ b/plumbing/reference_test.go
@@ -1,6 +1,10 @@
package plumbing
-import . "gopkg.in/check.v1"
+import (
+ "testing"
+
+ . "gopkg.in/check.v1"
+)
type ReferenceSuite struct{}
@@ -98,3 +102,21 @@ func (s *ReferenceSuite) TestIsTag(c *C) {
r := ReferenceName("refs/tags/v3.1.")
c.Assert(r.IsTag(), Equals, true)
}
+
+func benchMarkReferenceString(r *Reference, b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ _ = r.String()
+ }
+}
+
+func BenchmarkReferenceStringSymbolic(b *testing.B) {
+ benchMarkReferenceString(NewSymbolicReference("v3.1.1", "refs/tags/v3.1.1"), b)
+}
+
+func BenchmarkReferenceStringHash(b *testing.B) {
+ benchMarkReferenceString(NewHashReference("v3.1.1", NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")), b)
+}
+
+func BenchmarkReferenceStringInvalid(b *testing.B) {
+ benchMarkReferenceString(&Reference{}, b)
+}
diff --git a/plumbing/transport/client/client.go b/plumbing/transport/client/client.go
index 20c3d05..1948c23 100644
--- a/plumbing/transport/client/client.go
+++ b/plumbing/transport/client/client.go
@@ -3,10 +3,7 @@
package client
import (
- "crypto/tls"
- "crypto/x509"
"fmt"
- gohttp "net/http"
"github.com/go-git/go-git/v5/plumbing/transport"
"github.com/go-git/go-git/v5/plumbing/transport/file"
@@ -24,14 +21,6 @@ var Protocols = map[string]transport.Transport{
"file": file.DefaultClient,
}
-var insecureClient = http.NewClient(&gohttp.Client{
- Transport: &gohttp.Transport{
- TLSClientConfig: &tls.Config{
- InsecureSkipVerify: true,
- },
- },
-})
-
// InstallProtocol adds or modifies an existing protocol.
func InstallProtocol(scheme string, c transport.Transport) {
if c == nil {
@@ -50,27 +39,6 @@ func NewClient(endpoint *transport.Endpoint) (transport.Transport, error) {
}
func getTransport(endpoint *transport.Endpoint) (transport.Transport, error) {
- if endpoint.Protocol == "https" {
- if endpoint.InsecureSkipTLS {
- return insecureClient, nil
- }
-
- if len(endpoint.CaBundle) != 0 {
- rootCAs, _ := x509.SystemCertPool()
- if rootCAs == nil {
- rootCAs = x509.NewCertPool()
- }
- rootCAs.AppendCertsFromPEM(endpoint.CaBundle)
- return http.NewClient(&gohttp.Client{
- Transport: &gohttp.Transport{
- TLSClientConfig: &tls.Config{
- RootCAs: rootCAs,
- },
- },
- }), nil
- }
- }
-
f, ok := Protocols[endpoint.Protocol]
if !ok {
return nil, fmt.Errorf("unsupported scheme %q", endpoint.Protocol)
diff --git a/plumbing/transport/common.go b/plumbing/transport/common.go
index a9ee2ca..c6a054a 100644
--- a/plumbing/transport/common.go
+++ b/plumbing/transport/common.go
@@ -112,10 +112,41 @@ type Endpoint struct {
Port int
// Path is the repository path.
Path string
- // InsecureSkipTLS skips ssl verify if protocal is https
+ // InsecureSkipTLS skips ssl verify if protocol is https
InsecureSkipTLS bool
// CaBundle specify additional ca bundle with system cert pool
CaBundle []byte
+ // Proxy provides info required for connecting to a proxy.
+ Proxy ProxyOptions
+}
+
+type ProxyOptions struct {
+ URL string
+ Username string
+ Password string
+}
+
+func (o *ProxyOptions) Validate() error {
+ if o.URL != "" {
+ _, err := url.Parse(o.URL)
+ return err
+ }
+ return nil
+}
+
+func (o *ProxyOptions) FullURL() (*url.URL, error) {
+ proxyURL, err := url.Parse(o.URL)
+ if err != nil {
+ return nil, err
+ }
+ if o.Username != "" {
+ if o.Password != "" {
+ proxyURL.User = url.UserPassword(o.Username, o.Password)
+ } else {
+ proxyURL.User = url.User(o.Username)
+ }
+ }
+ return proxyURL, nil
}
var defaultPorts = map[string]int{
@@ -196,11 +227,17 @@ func parseURL(endpoint string) (*Endpoint, error) {
pass, _ = u.User.Password()
}
+ host := u.Hostname()
+ if strings.Contains(host, ":") {
+ // IPv6 address
+ host = "[" + host + "]"
+ }
+
return &Endpoint{
Protocol: u.Scheme,
User: user,
Password: pass,
- Host: u.Hostname(),
+ Host: host,
Port: getPort(u),
Path: getPath(u),
}, nil
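A minimal sketch of the new ProxyOptions in use; the proxy address and credentials are placeholders:

package example

import (
	"fmt"

	"github.com/go-git/go-git/v5/plumbing/transport"
)

func proxyURL() error {
	opts := transport.ProxyOptions{
		URL:      "http://proxy.internal:3128",
		Username: "builder",
		Password: "s3cret",
	}
	if err := opts.Validate(); err != nil {
		return err
	}
	// Credentials are only embedded into the returned URL when set.
	u, err := opts.FullURL()
	if err != nil {
		return err
	}
	fmt.Println(u.String()) // http://builder:s3cret@proxy.internal:3128
	return nil
}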
diff --git a/plumbing/transport/common_test.go b/plumbing/transport/common_test.go
index 0c5a01a..d9f12ab 100644
--- a/plumbing/transport/common_test.go
+++ b/plumbing/transport/common_test.go
@@ -95,16 +95,28 @@ func (s *SuiteCommon) TestNewEndpointSCPLike(c *C) {
c.Assert(e.String(), Equals, "ssh://git@github.com/user/repository.git")
}
-func (s *SuiteCommon) TestNewEndpointSCPLikeWithPort(c *C) {
+func (s *SuiteCommon) TestNewEndpointSCPLikeWithNumericPath(c *C) {
e, err := NewEndpoint("git@github.com:9999/user/repository.git")
c.Assert(err, IsNil)
c.Assert(e.Protocol, Equals, "ssh")
c.Assert(e.User, Equals, "git")
c.Assert(e.Password, Equals, "")
c.Assert(e.Host, Equals, "github.com")
- c.Assert(e.Port, Equals, 9999)
- c.Assert(e.Path, Equals, "user/repository.git")
- c.Assert(e.String(), Equals, "ssh://git@github.com:9999/user/repository.git")
+ c.Assert(e.Port, Equals, 22)
+ c.Assert(e.Path, Equals, "9999/user/repository.git")
+ c.Assert(e.String(), Equals, "ssh://git@github.com/9999/user/repository.git")
+}
+
+func (s *SuiteCommon) TestNewEndpointSCPLikeWithPort(c *C) {
+ e, err := NewEndpoint("git@github.com:8080:9999/user/repository.git")
+ c.Assert(err, IsNil)
+ c.Assert(e.Protocol, Equals, "ssh")
+ c.Assert(e.User, Equals, "git")
+ c.Assert(e.Password, Equals, "")
+ c.Assert(e.Host, Equals, "github.com")
+ c.Assert(e.Port, Equals, 8080)
+ c.Assert(e.Path, Equals, "9999/user/repository.git")
+ c.Assert(e.String(), Equals, "ssh://git@github.com:8080/9999/user/repository.git")
}
func (s *SuiteCommon) TestNewEndpointFileAbs(c *C) {
@@ -186,3 +198,15 @@ func (s *SuiteCommon) TestFilterUnsupportedCapabilities(c *C) {
FilterUnsupportedCapabilities(l)
c.Assert(l.Supports(capability.MultiACK), Equals, false)
}
+
+func (s *SuiteCommon) TestNewEndpointIPv6(c *C) {
+ // see issue https://github.com/go-git/go-git/issues/740
+ //
+ // IPv6 host names are not being properly handled, which results in unhelpful
+ // error messages depending on the format used.
+ //
+ e, err := NewEndpoint("http://[::1]:8080/foo.git")
+ c.Assert(err, IsNil)
+ c.Assert(e.Host, Equals, "[::1]")
+ c.Assert(e.String(), Equals, "http://[::1]:8080/foo.git")
+}
diff --git a/plumbing/transport/file/common_test.go b/plumbing/transport/file/common_test.go
index 4d6612b..7e033a8 100644
--- a/plumbing/transport/file/common_test.go
+++ b/plumbing/transport/file/common_test.go
@@ -1,7 +1,6 @@
package file
import (
- "io/ioutil"
"os"
"os/exec"
"path/filepath"
@@ -25,7 +24,7 @@ func (s *CommonSuite) SetUpSuite(c *C) {
}
var err error
- s.tmpDir, err = ioutil.TempDir("", "")
+ s.tmpDir, err = os.MkdirTemp("", "")
c.Assert(err, IsNil)
s.ReceivePackBin = filepath.Join(s.tmpDir, "git-receive-pack")
s.UploadPackBin = filepath.Join(s.tmpDir, "git-upload-pack")
diff --git a/plumbing/transport/git/common.go b/plumbing/transport/git/common.go
index 306aae2..92fc0be 100644
--- a/plumbing/transport/git/common.go
+++ b/plumbing/transport/git/common.go
@@ -5,6 +5,7 @@ import (
"fmt"
"io"
"net"
+ "strconv"
"github.com/go-git/go-git/v5/plumbing/format/pktline"
"github.com/go-git/go-git/v5/plumbing/transport"
@@ -69,7 +70,7 @@ func (c *command) getHostWithPort() string {
port = DefaultPort
}
- return fmt.Sprintf("%s:%d", host, port)
+ return net.JoinHostPort(host, strconv.Itoa(port))
}
// StderrPipe git protocol doesn't have any dedicated error channel
@@ -77,14 +78,14 @@ func (c *command) StderrPipe() (io.Reader, error) {
return nil, nil
}
-// StdinPipe return the underlying connection as WriteCloser, wrapped to prevent
+// StdinPipe returns the underlying connection as WriteCloser, wrapped to prevent
// calls to the Close function on the connection; a command execution in the git
// protocol can't be closed or killed
func (c *command) StdinPipe() (io.WriteCloser, error) {
return ioutil.WriteNopCloser(c.conn), nil
}
-// StdoutPipe return the underlying connection as Reader
+// StdoutPipe returns the underlying connection as Reader
func (c *command) StdoutPipe() (io.Reader, error) {
return c.conn, nil
}
@@ -92,7 +93,7 @@ func (c *command) StdoutPipe() (io.Reader, error) {
func endpointToCommand(cmd string, ep *transport.Endpoint) string {
host := ep.Host
if ep.Port != DefaultPort {
- host = fmt.Sprintf("%s:%d", ep.Host, ep.Port)
+ host = net.JoinHostPort(ep.Host, strconv.Itoa(ep.Port))
}
return fmt.Sprintf("%s %s%chost=%s%c", cmd, ep.Path, 0, host, 0)
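
For context, a standard-library-only sketch of why the switch from fmt.Sprintf to net.JoinHostPort matters here: only the latter brackets IPv6 literals, producing an address that net.Dial accepts.

package main

import (
	"fmt"
	"net"
	"strconv"
)

func main() {
	host, port := "::1", 9418

	// The old formatting yields an ambiguous address for IPv6 literals.
	fmt.Printf("%s:%d\n", host, port) // ::1:9418

	// net.JoinHostPort adds the brackets that dialers expect.
	fmt.Println(net.JoinHostPort(host, strconv.Itoa(port))) // [::1]:9418
}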
diff --git a/plumbing/transport/git/common_test.go b/plumbing/transport/git/common_test.go
index 3391aaf..7389919 100644
--- a/plumbing/transport/git/common_test.go
+++ b/plumbing/transport/git/common_test.go
@@ -2,7 +2,6 @@ package git
import (
"fmt"
- "io/ioutil"
"net"
"os"
"os/exec"
@@ -37,7 +36,7 @@ func (s *BaseSuite) SetUpTest(c *C) {
s.port, err = freePort()
c.Assert(err, IsNil)
- s.base, err = ioutil.TempDir(os.TempDir(), fmt.Sprintf("go-git-protocol-%d", s.port))
+ s.base, err = os.MkdirTemp(os.TempDir(), fmt.Sprintf("go-git-protocol-%d", s.port))
c.Assert(err, IsNil)
}
diff --git a/plumbing/transport/http/common.go b/plumbing/transport/http/common.go
index d57c0fe..a7cdc1e 100644
--- a/plumbing/transport/http/common.go
+++ b/plumbing/transport/http/common.go
@@ -4,16 +4,22 @@ package http
import (
"bytes"
"context"
+ "crypto/tls"
+ "crypto/x509"
"fmt"
"net"
"net/http"
+ "net/url"
+ "reflect"
"strconv"
"strings"
+ "sync"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/protocol/packp"
"github.com/go-git/go-git/v5/plumbing/transport"
"github.com/go-git/go-git/v5/utils/ioutil"
+ "github.com/golang/groupcache/lru"
)
// it requires a bytes.Buffer, because we need to know the length
@@ -67,6 +73,17 @@ func advertisedReferences(ctx context.Context, s *session, serviceName string) (
return nil, err
}
+ // Git 2.41+ returns a zero-id plus capabilities when an empty
+ // repository is being cloned. This skips the existing logic within
+ // advrefs_decode.decodeFirstHash, which expects a flush-pkt instead.
+ //
+ // This logic aligns with plumbing/transport/internal/common/common.go.
+ if ar.IsEmpty() &&
+ // Empty repositories are valid for git-receive-pack.
+ transport.ReceivePackServiceName != serviceName {
+ return nil, transport.ErrEmptyRemoteRepository
+ }
+
transport.FilterUnsupportedCapabilities(ar.Capabilities)
s.advRefs = ar
@@ -74,40 +91,83 @@ func advertisedReferences(ctx context.Context, s *session, serviceName string) (
}
type client struct {
- c *http.Client
+ c *http.Client
+ transports *lru.Cache
+ m sync.RWMutex
}
-// DefaultClient is the default HTTP client, which uses `http.DefaultClient`.
-var DefaultClient = NewClient(nil)
+// ClientOptions holds user configurable options for the client.
+type ClientOptions struct {
+	// CacheMaxEntries is the maximum number of entries that the transport
+	// objects cache will hold at any given time. It must be a positive integer.
+	// Calling `client.addTransport()` after the cache has reached the specified
+	// size will result in the least recently used transport being evicted
+	// before the provided transport is added to the cache.
+ CacheMaxEntries int
+}
+
+var (
+ // defaultTransportCacheSize is the default capacity of the transport objects cache.
+ // Its value is 0 because transport caching is turned off by default and is an
+ // opt-in feature.
+ defaultTransportCacheSize = 0
+
+ // DefaultClient is the default HTTP client, which uses a net/http client configured
+ // with http.DefaultTransport.
+ DefaultClient = NewClient(nil)
+)
// NewClient creates a new client with a custom net/http client.
// See `InstallProtocol` to install and override default http client.
-// Unless a properly initialized client is given, it will fall back into
-// `http.DefaultClient`.
+// If the net/http client is nil or empty, it will use a net/http client configured
+// with http.DefaultTransport.
//
// Note that the HTTP client cannot distinguish between private repositories and
// nonexistent repositories on GitHub, so it returns `ErrAuthorizationRequired`
// for both.
func NewClient(c *http.Client) transport.Transport {
if c == nil {
- return &client{http.DefaultClient}
+ c = &http.Client{
+ Transport: http.DefaultTransport,
+ }
}
+ return NewClientWithOptions(c, &ClientOptions{
+ CacheMaxEntries: defaultTransportCacheSize,
+ })
+}
- return &client{
+// NewClientWithOptions returns a new client configured with the provided net/http client
+// and other custom options specific to the client.
+// If the net/http client is nil or empty, it will use a net/http client configured
+// with http.DefaultTransport.
+func NewClientWithOptions(c *http.Client, opts *ClientOptions) transport.Transport {
+ if c == nil {
+ c = &http.Client{
+ Transport: http.DefaultTransport,
+ }
+ }
+ cl := &client{
c: c,
}
+
+ if opts != nil {
+ if opts.CacheMaxEntries > 0 {
+ cl.transports = lru.New(opts.CacheMaxEntries)
+ }
+ }
+ return cl
}
func (c *client) NewUploadPackSession(ep *transport.Endpoint, auth transport.AuthMethod) (
transport.UploadPackSession, error) {
- return newUploadPackSession(c.c, ep, auth)
+ return newUploadPackSession(c, ep, auth)
}
func (c *client) NewReceivePackSession(ep *transport.Endpoint, auth transport.AuthMethod) (
transport.ReceivePackSession, error) {
- return newReceivePackSession(c.c, ep, auth)
+ return newReceivePackSession(c, ep, auth)
}
type session struct {
@@ -117,10 +177,106 @@ type session struct {
advRefs *packp.AdvRefs
}
-func newSession(c *http.Client, ep *transport.Endpoint, auth transport.AuthMethod) (*session, error) {
+func transportWithInsecureTLS(transport *http.Transport) {
+ if transport.TLSClientConfig == nil {
+ transport.TLSClientConfig = &tls.Config{}
+ }
+ transport.TLSClientConfig.InsecureSkipVerify = true
+}
+
+func transportWithCABundle(transport *http.Transport, caBundle []byte) error {
+ rootCAs, err := x509.SystemCertPool()
+ if err != nil {
+ return err
+ }
+ if rootCAs == nil {
+ rootCAs = x509.NewCertPool()
+ }
+ rootCAs.AppendCertsFromPEM(caBundle)
+ if transport.TLSClientConfig == nil {
+ transport.TLSClientConfig = &tls.Config{}
+ }
+ transport.TLSClientConfig.RootCAs = rootCAs
+ return nil
+}
+
+func transportWithProxy(transport *http.Transport, proxyURL *url.URL) {
+ transport.Proxy = http.ProxyURL(proxyURL)
+}
+
+func configureTransport(transport *http.Transport, ep *transport.Endpoint) error {
+ if len(ep.CaBundle) > 0 {
+ if err := transportWithCABundle(transport, ep.CaBundle); err != nil {
+ return err
+ }
+ }
+ if ep.InsecureSkipTLS {
+ transportWithInsecureTLS(transport)
+ }
+
+ if ep.Proxy.URL != "" {
+ proxyURL, err := ep.Proxy.FullURL()
+ if err != nil {
+ return err
+ }
+ transportWithProxy(transport, proxyURL)
+ }
+ return nil
+}
+
+func newSession(c *client, ep *transport.Endpoint, auth transport.AuthMethod) (*session, error) {
+ var httpClient *http.Client
+
+ // We need to configure the http transport if there are transport specific
+ // options present in the endpoint.
+ if len(ep.CaBundle) > 0 || ep.InsecureSkipTLS || ep.Proxy.URL != "" {
+ var transport *http.Transport
+		// If the client wasn't configured with a transport cache, just configure
+		// the transport and use it directly; otherwise try to use the cache.
+ if c.transports == nil {
+ tr, ok := c.c.Transport.(*http.Transport)
+ if !ok {
+ return nil, fmt.Errorf("expected underlying client transport to be of type: %s; got: %s",
+ reflect.TypeOf(transport), reflect.TypeOf(c.c.Transport))
+ }
+
+ transport = tr.Clone()
+			if err := configureTransport(transport, ep); err != nil {
+				return nil, err
+			}
+ } else {
+ transportOpts := transportOptions{
+ caBundle: string(ep.CaBundle),
+ insecureSkipTLS: ep.InsecureSkipTLS,
+ }
+ if ep.Proxy.URL != "" {
+ proxyURL, err := ep.Proxy.FullURL()
+ if err != nil {
+ return nil, err
+ }
+ transportOpts.proxyURL = *proxyURL
+ }
+ var found bool
+ transport, found = c.fetchTransport(transportOpts)
+
+ if !found {
+ transport = c.c.Transport.(*http.Transport).Clone()
+				if err := configureTransport(transport, ep); err != nil {
+					return nil, err
+				}
+ c.addTransport(transportOpts, transport)
+ }
+ }
+
+ httpClient = &http.Client{
+ Transport: transport,
+ CheckRedirect: c.c.CheckRedirect,
+ Jar: c.c.Jar,
+ Timeout: c.c.Timeout,
+ }
+ } else {
+ httpClient = c.c
+ }
+
s := &session{
auth: basicAuthFromEndpoint(ep),
- client: c,
+ client: httpClient,
endpoint: ep,
}
if auth != nil {
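
Before the tests, a hedged usage sketch of the options-based constructor introduced above; the timeout and cache size are arbitrary example values, not defaults:

package main

import (
	"net/http"
	"time"

	githttp "github.com/go-git/go-git/v5/plumbing/transport/http"
)

func main() {
	// Opt in to per-endpoint transport caching (disabled by default, see
	// defaultTransportCacheSize above).
	t := githttp.NewClientWithOptions(
		&http.Client{
			Transport: http.DefaultTransport,
			Timeout:   30 * time.Second,
		},
		&githttp.ClientOptions{CacheMaxEntries: 8},
	)

	// t satisfies transport.Transport and can open sessions, e.g.
	// t.NewUploadPackSession(endpoint, auth).
	_ = t
}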
diff --git a/plumbing/transport/http/common_test.go b/plumbing/transport/http/common_test.go
index 4122e62..1517228 100644
--- a/plumbing/transport/http/common_test.go
+++ b/plumbing/transport/http/common_test.go
@@ -3,7 +3,6 @@ package http
import (
"crypto/tls"
"fmt"
- "io/ioutil"
"log"
"net"
"net/http"
@@ -91,6 +90,60 @@ func (s *ClientSuite) TestNewHTTPError40x(c *C) {
"unexpected client error.*")
}
+func (s *ClientSuite) Test_newSession(c *C) {
+ cl := NewClientWithOptions(nil, &ClientOptions{
+ CacheMaxEntries: 2,
+ }).(*client)
+
+ insecureEP := s.Endpoint
+ insecureEP.InsecureSkipTLS = true
+ session, err := newSession(cl, insecureEP, nil)
+ c.Assert(err, IsNil)
+
+ sessionTransport := session.client.Transport.(*http.Transport)
+ c.Assert(sessionTransport.TLSClientConfig.InsecureSkipVerify, Equals, true)
+ t, ok := cl.fetchTransport(transportOptions{
+ insecureSkipTLS: true,
+ })
+ // transport should be cached.
+ c.Assert(ok, Equals, true)
+ // cached transport should be the one that's used.
+ c.Assert(sessionTransport, Equals, t)
+
+ caEndpoint := insecureEP
+ caEndpoint.CaBundle = []byte("this is the way")
+ session, err = newSession(cl, caEndpoint, nil)
+ c.Assert(err, IsNil)
+
+ sessionTransport = session.client.Transport.(*http.Transport)
+ c.Assert(sessionTransport.TLSClientConfig.InsecureSkipVerify, Equals, true)
+ c.Assert(sessionTransport.TLSClientConfig.RootCAs, NotNil)
+ t, ok = cl.fetchTransport(transportOptions{
+ insecureSkipTLS: true,
+ caBundle: "this is the way",
+ })
+ // transport should be cached.
+ c.Assert(ok, Equals, true)
+ // cached transport should be the one that's used.
+ c.Assert(sessionTransport, Equals, t)
+
+ session, err = newSession(cl, caEndpoint, nil)
+ c.Assert(err, IsNil)
+ sessionTransport = session.client.Transport.(*http.Transport)
+ // transport that's going to be used should be cached already.
+ c.Assert(sessionTransport, Equals, t)
+ // no new transport got cached.
+ c.Assert(cl.transports.Len(), Equals, 2)
+
+ // if the cache does not exist, the transport should still be correctly configured.
+ cl.transports = nil
+ session, err = newSession(cl, insecureEP, nil)
+ c.Assert(err, IsNil)
+
+ sessionTransport = session.client.Transport.(*http.Transport)
+ c.Assert(sessionTransport.TLSClientConfig.InsecureSkipVerify, Equals, true)
+}
+
func (s *ClientSuite) testNewHTTPError(c *C, code int, msg string) {
req, _ := http.NewRequest("GET", "foo", nil)
res := &http.Response{
@@ -168,7 +221,7 @@ func (s *BaseSuite) SetUpTest(c *C) {
l, err := net.Listen("tcp", "localhost:0")
c.Assert(err, IsNil)
- base, err := ioutil.TempDir(os.TempDir(), fmt.Sprintf("go-git-http-%d", s.port))
+ base, err := os.MkdirTemp(os.TempDir(), fmt.Sprintf("go-git-http-%d", s.port))
c.Assert(err, IsNil)
s.port = l.Addr().(*net.TCPAddr).Port
diff --git a/plumbing/transport/http/internal/test/proxy_test.go b/plumbing/transport/http/internal/test/proxy_test.go
new file mode 100644
index 0000000..6ae2943
--- /dev/null
+++ b/plumbing/transport/http/internal/test/proxy_test.go
@@ -0,0 +1,72 @@
+package test
+
+import (
+ "context"
+ "crypto/tls"
+ "fmt"
+ "net"
+ nethttp "net/http"
+ "os"
+ "sync/atomic"
+ "testing"
+
+ "github.com/elazarl/goproxy"
+
+ "github.com/go-git/go-git/v5/plumbing/transport"
+ "github.com/go-git/go-git/v5/plumbing/transport/http"
+
+ . "gopkg.in/check.v1"
+)
+
+// Hook up gocheck into the "go test" runner.
+func Test(t *testing.T) { TestingT(t) }
+
+type ProxySuite struct{}
+
+var _ = Suite(&ProxySuite{})
+
+var proxiedRequests int32
+
+// This test exercises proxy support via an env var, i.e. `HTTPS_PROXY`.
+// It's located in a separate package because Go caches the value of proxy
+// env vars, leading to misleading/unexpected test results.
+func (s *ProxySuite) TestAdvertisedReferences(c *C) {
+ proxy := goproxy.NewProxyHttpServer()
+ proxy.Verbose = true
+ SetupHTTPSProxy(proxy, &proxiedRequests)
+ httpsListener, err := net.Listen("tcp", ":0")
+ c.Assert(err, IsNil)
+ defer httpsListener.Close()
+ httpProxyAddr := fmt.Sprintf("localhost:%d", httpsListener.Addr().(*net.TCPAddr).Port)
+
+ proxyServer := nethttp.Server{
+ Addr: httpProxyAddr,
+ Handler: proxy,
+		// Due to how Go manages HTTP/2 when provided with a custom TLS config,
+		// servers and clients running in the same process lead to issues.
+		// Ref: https://github.com/golang/go/issues/21336
+ TLSConfig: &tls.Config{
+ NextProtos: []string{"http/1.1"},
+ },
+ }
+ go proxyServer.ServeTLS(httpsListener, "../../testdata/certs/server.crt", "../../testdata/certs/server.key")
+ defer proxyServer.Close()
+ os.Setenv("HTTPS_PROXY", fmt.Sprintf("https://user:pass@%s", httpProxyAddr))
+ defer os.Unsetenv("HTTPS_PROXY")
+
+ endpoint, err := transport.NewEndpoint("https://github.com/git-fixtures/basic.git")
+ c.Assert(err, IsNil)
+ endpoint.InsecureSkipTLS = true
+
+ client := http.DefaultClient
+ session, err := client.NewUploadPackSession(endpoint, nil)
+ c.Assert(err, IsNil)
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+ info, err := session.AdvertisedReferencesContext(ctx)
+ c.Assert(err, IsNil)
+ c.Assert(info, NotNil)
+ proxyUsed := atomic.LoadInt32(&proxiedRequests) > 0
+ c.Assert(proxyUsed, Equals, true)
+}
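
A sketch of the environment-variable path this test exercises; the proxy address and credentials are placeholders, and real code would normally set HTTPS_PROXY outside the process rather than in it:

package main

import (
	"os"

	"github.com/go-git/go-git/v5/plumbing/transport"
	githttp "github.com/go-git/go-git/v5/plumbing/transport/http"
)

func main() {
	// http.DefaultTransport honours HTTPS_PROXY, so DefaultClient picks it up.
	os.Setenv("HTTPS_PROXY", "https://user:pass@localhost:8888")
	defer os.Unsetenv("HTTPS_PROXY")

	ep, err := transport.NewEndpoint("https://github.com/git-fixtures/basic.git")
	if err != nil {
		panic(err)
	}

	session, err := githttp.DefaultClient.NewUploadPackSession(ep, nil)
	if err != nil {
		panic(err)
	}
	defer session.Close()
}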
diff --git a/plumbing/transport/http/internal/test/test_utils.go b/plumbing/transport/http/internal/test/test_utils.go
new file mode 100644
index 0000000..6665fb3
--- /dev/null
+++ b/plumbing/transport/http/internal/test/test_utils.go
@@ -0,0 +1,43 @@
+package test
+
+import (
+ "encoding/base64"
+ "strings"
+ "sync/atomic"
+
+ "github.com/elazarl/goproxy"
+)
+
+func SetupHTTPSProxy(proxy *goproxy.ProxyHttpServer, proxiedRequests *int32) {
+ var proxyHandler goproxy.FuncHttpsHandler = func(host string, ctx *goproxy.ProxyCtx) (*goproxy.ConnectAction, string) {
+ if strings.Contains(host, "github.com") {
+ user, pass, _ := ParseBasicAuth(ctx.Req.Header.Get("Proxy-Authorization"))
+ if user != "user" || pass != "pass" {
+ return goproxy.RejectConnect, host
+ }
+ atomic.AddInt32(proxiedRequests, 1)
+ return goproxy.OkConnect, host
+ }
+ // Reject if it isn't our request.
+ return goproxy.RejectConnect, host
+ }
+ proxy.OnRequest().HandleConnect(proxyHandler)
+}
+
+// adapted from https://github.com/golang/go/blob/2ef70d9d0f98832c8103a7968b195e560a8bb262/src/net/http/request.go#L959
+func ParseBasicAuth(auth string) (username, password string, ok bool) {
+ const prefix = "Basic "
+ if len(auth) < len(prefix) || !strings.EqualFold(auth[:len(prefix)], prefix) {
+ return "", "", false
+ }
+ c, err := base64.StdEncoding.DecodeString(auth[len(prefix):])
+ if err != nil {
+ return "", "", false
+ }
+ cs := string(c)
+ username, password, ok = strings.Cut(cs, ":")
+ if !ok {
+ return "", "", false
+ }
+ return username, password, true
+}
diff --git a/plumbing/transport/http/proxy_test.go b/plumbing/transport/http/proxy_test.go
new file mode 100644
index 0000000..f3024da
--- /dev/null
+++ b/plumbing/transport/http/proxy_test.go
@@ -0,0 +1,119 @@
+package http
+
+import (
+ "context"
+ "crypto/tls"
+ "fmt"
+ "net"
+ "net/http"
+ "strings"
+ "sync/atomic"
+
+ "github.com/elazarl/goproxy"
+ fixtures "github.com/go-git/go-git-fixtures/v4"
+ "github.com/go-git/go-git/v5/plumbing/transport"
+ "github.com/go-git/go-git/v5/plumbing/transport/http/internal/test"
+
+ . "gopkg.in/check.v1"
+)
+
+type ProxySuite struct {
+ u UploadPackSuite
+ fixtures.Suite
+}
+
+var _ = Suite(&ProxySuite{})
+
+var proxiedRequests int32
+
+func (s *ProxySuite) TestAdvertisedReferences(c *C) {
+ s.u.SetUpTest(c)
+ proxy := goproxy.NewProxyHttpServer()
+ proxy.Verbose = true
+ setupHTTPProxy(proxy, &proxiedRequests)
+ httpListener, err := net.Listen("tcp", ":0")
+ c.Assert(err, IsNil)
+ defer httpListener.Close()
+
+ httpProxyAddr := fmt.Sprintf("http://localhost:%d", httpListener.Addr().(*net.TCPAddr).Port)
+ proxyServer := http.Server{
+ Addr: httpProxyAddr,
+ Handler: proxy,
+ }
+ go proxyServer.Serve(httpListener)
+ defer proxyServer.Close()
+
+ endpoint := s.u.prepareRepository(c, fixtures.Basic().One(), "basic.git")
+ endpoint.Proxy = transport.ProxyOptions{
+ URL: httpProxyAddr,
+ Username: "user",
+ Password: "pass",
+ }
+
+ s.u.Client = NewClient(nil)
+ session, err := s.u.Client.NewUploadPackSession(endpoint, nil)
+ c.Assert(err, IsNil)
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+ info, err := session.AdvertisedReferencesContext(ctx)
+ c.Assert(err, IsNil)
+ c.Assert(info, NotNil)
+ proxyUsed := atomic.LoadInt32(&proxiedRequests) > 0
+ c.Assert(proxyUsed, Equals, true)
+
+ atomic.StoreInt32(&proxiedRequests, 0)
+ test.SetupHTTPSProxy(proxy, &proxiedRequests)
+ httpsListener, err := net.Listen("tcp", ":0")
+ c.Assert(err, IsNil)
+ defer httpsListener.Close()
+ httpsProxyAddr := fmt.Sprintf("https://localhost:%d", httpsListener.Addr().(*net.TCPAddr).Port)
+
+ tlsProxyServer := http.Server{
+ Addr: httpsProxyAddr,
+ Handler: proxy,
+		// Due to how Go manages HTTP/2 when provided with a custom TLS config,
+		// servers and clients running in the same process lead to issues.
+		// Ref: https://github.com/golang/go/issues/21336
+ TLSConfig: &tls.Config{
+ NextProtos: []string{"http/1.1"},
+ },
+ }
+ go tlsProxyServer.ServeTLS(httpsListener, "testdata/certs/server.crt", "testdata/certs/server.key")
+ defer tlsProxyServer.Close()
+
+ endpoint, err = transport.NewEndpoint("https://github.com/git-fixtures/basic.git")
+ c.Assert(err, IsNil)
+ endpoint.Proxy = transport.ProxyOptions{
+ URL: httpsProxyAddr,
+ Username: "user",
+ Password: "pass",
+ }
+ endpoint.InsecureSkipTLS = true
+
+ session, err = s.u.Client.NewUploadPackSession(endpoint, nil)
+ c.Assert(err, IsNil)
+
+ info, err = session.AdvertisedReferencesContext(ctx)
+ c.Assert(err, IsNil)
+ c.Assert(info, NotNil)
+ proxyUsed = atomic.LoadInt32(&proxiedRequests) > 0
+ c.Assert(proxyUsed, Equals, true)
+}
+
+func setupHTTPProxy(proxy *goproxy.ProxyHttpServer, proxiedRequests *int32) {
+ // The request is being forwarded to the local test git server in this handler.
+ var proxyHandler goproxy.FuncReqHandler = func(req *http.Request, ctx *goproxy.ProxyCtx) (*http.Request, *http.Response) {
+ if strings.Contains(req.Host, "localhost") {
+ user, pass, _ := test.ParseBasicAuth(req.Header.Get("Proxy-Authorization"))
+ if user != "user" || pass != "pass" {
+ return req, goproxy.NewResponse(req, goproxy.ContentTypeText, http.StatusUnauthorized, "")
+ }
+ atomic.AddInt32(proxiedRequests, 1)
+ return req, nil
+ }
+ // Reject if it isn't our request.
+ return req, goproxy.NewResponse(req, goproxy.ContentTypeText, http.StatusForbidden, "")
+ }
+ proxy.OnRequest().Do(proxyHandler)
+}
diff --git a/plumbing/transport/http/receive_pack.go b/plumbing/transport/http/receive_pack.go
index 4d14ff2..4387ecf 100644
--- a/plumbing/transport/http/receive_pack.go
+++ b/plumbing/transport/http/receive_pack.go
@@ -19,7 +19,7 @@ type rpSession struct {
*session
}
-func newReceivePackSession(c *http.Client, ep *transport.Endpoint, auth transport.AuthMethod) (transport.ReceivePackSession, error) {
+func newReceivePackSession(c *client, ep *transport.Endpoint, auth transport.AuthMethod) (transport.ReceivePackSession, error) {
s, err := newSession(c, ep, auth)
return &rpSession{s}, err
}
diff --git a/plumbing/transport/http/testdata/certs/server.crt b/plumbing/transport/http/testdata/certs/server.crt
new file mode 100644
index 0000000..9bdec2c
--- /dev/null
+++ b/plumbing/transport/http/testdata/certs/server.crt
@@ -0,0 +1,22 @@
+-----BEGIN CERTIFICATE-----
+MIIDkzCCAnugAwIBAgIUWcuzUyG3EfGsXVUH0BAmnCJyNHswDQYJKoZIhvcNAQEL
+BQAwWTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
+GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDESMBAGA1UEAwwJbG9jYWxob3N0MB4X
+DTIzMDMwNzA3MTgwNloXDTI0MDMwNjA3MTgwNlowWTELMAkGA1UEBhMCQVUxEzAR
+BgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5
+IEx0ZDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAvyKX6vJXt1u+WBfBNJByFDAb7msdsk6SiPFlX5uyilaWmlRxvLo1
+GZMjjuQbs4wU6BAoZcgiELnsC9GSyxgrhk7NIW3ud/QD7s8ZxETxFLb0ur6tJj7/
+ETEcU/AKSl1FpeJbGHqGipYp5+0GU0zPDxRYqC2N3+fcGZPQbBwxb1f+MrBjWutb
+3eNYTLdPH3W7RUqbunC1KZRJ8XOcU5XZ4qEaMkZYdz1QItxwPnpPuSZs53ga3TDF
+zclpQcT8OH2JNwSI6bwlwFJ0Es06manw7XHmgd8anhix9FdsQYaTOW4kqh1iKQ/P
+jPG50bdTUEqlOsaa+9av/qf+90npzt3xqQIDAQABo1MwUTAdBgNVHQ4EFgQUqTqb
+q+jiJVgwftQS+YLcQWnvTuAwHwYDVR0jBBgwFoAUqTqbq+jiJVgwftQS+YLcQWnv
+TuAwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAVUaFSikxyCy1
+4P/ZZgeuR7vEJ5vWBxKPw/jFNZUFWy2Ag32w1BhrDwoYoc1Awg76QF2TqBQAhFNm
+ek9aE+L83P/R2UhE9+LHnzwdMXt9HYOI1grONk2z3lMI1y4FCJBxHfGyC/XMoNgZ
+qP7UdLgLGTIMN3O1Fww416Hn8BHzxN4o5ZEHJZ6QPMuN8OLk9oVu3yQIq/QcmSDH
+GT2RiwT5IJWMUKK1UrV+y3/T9FwW2qqu+LX+coxjk7HgDWy3y66V9ahLBt8kONcr
+qK0zutoQ5WPSmvnD2Nr0LVLGXEd7hbQNO7bgjO2YOBtnagUQJt72i/OmvZv8Mfnp
+Bu6Qgl5hDw==
+-----END CERTIFICATE-----
diff --git a/plumbing/transport/http/testdata/certs/server.key b/plumbing/transport/http/testdata/certs/server.key
new file mode 100644
index 0000000..9a0cd8f
--- /dev/null
+++ b/plumbing/transport/http/testdata/certs/server.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC/Ipfq8le3W75Y
+F8E0kHIUMBvuax2yTpKI8WVfm7KKVpaaVHG8ujUZkyOO5BuzjBToEChlyCIQuewL
+0ZLLGCuGTs0hbe539APuzxnERPEUtvS6vq0mPv8RMRxT8ApKXUWl4lsYeoaKlinn
+7QZTTM8PFFioLY3f59wZk9BsHDFvV/4ysGNa61vd41hMt08fdbtFSpu6cLUplEnx
+c5xTldnioRoyRlh3PVAi3HA+ek+5JmzneBrdMMXNyWlBxPw4fYk3BIjpvCXAUnQS
+zTqZqfDtceaB3xqeGLH0V2xBhpM5biSqHWIpD8+M8bnRt1NQSqU6xpr71q/+p/73
+SenO3fGpAgMBAAECggEAQUjenQhzv5Rdmpdajcq8vHqGP9Rki0/dK1tQpex3elsD
+C+nGA5GSq46feaIeeCBjz7QdKE7Im+/1WUAXJLm3vCNUW5PB/UTixwIEKg7mTY4E
+X3jbiZHA661boKv/x9C+BmAff2fyZonN/ILwQymcG+l2MtOEfzMh8baUXSjwFbhg
+B08u4iXjee0y9I0CGMYWfasHLOIuhACCFKtqnvdQp8B82g8eSPhme5IjfPP8KZVr
+00n6z8m00HVk6/yYJ8pVZ82j3T+wH6IqvlvaC320sbto8YXV6i8GWHaJumzU4/9s
+IRm4459E+NmNcLNY/TCu89zsfrgNirN+qFfvJIOTxQKBgQDtME8s4UP0MhGuJ2lD
+1HD64fAxMC6Xp/QSzY91Yn79UNssUUV+IwjuUnLIz3U8DBs/QETLm7CkNtI7h5m5
+dBdeBBzCRGxhe8WqRfvceu4s0zr08ZkDaKLjFsBSnBsXZhKAAuRqBjnGAoAiKgVa
+WpEAug00ThhQjipSY9tO9NSBawKBgQDOSz+8m2HJFktEdSctKIB9DesqlAg7YCyy
+dHzywP0/r7wEvsCN7xPgCT5g8JBkRaFvLLKgw7gMKAUx8V2iwizEoDCAs/pbTWji
+uZwPC8lWtbkpBMQIaP4Wap+GyFQJKv1/qZduwpkwkj+ok+m3WwIW55VFGyLn3XGG
+VcLZm83aOwKBgQDXXI/nXjqHVZb8HEjWD+Ttx4yB/Q+xIAzbrc3edap8c5guKzUA
+DOulCTOz5bq65PsweTh970V6NVS6PKt12lUFRpKeSeZmtS2LJ7RCQ1RTWxAjK+MV
+V0LfEt9ZouhuXH3bwcSICFMY2VhirOjjW2xhzo0Cuw4UxqDi4kxU6rSxNQKBgQCI
+sn5KmV/jot0/QK40E0mJFEcHkM4foiwcGGqPZWiq4eUh89CefJTb+OQX0nCrsSQ3
+ChRXyTlU/NPsczcL2cVWiZt6PUihZZsh2cJaigHhbkuCrcDEneX4rrCE3IwrAwy1
+oohRAawG7nI2X8UYFbs9uDlGcKPhpvBKBtw13DM87wKBgE8fOiFoTph//6piU7dV
+pN33UfhPcAFwsIzxAH6Ljo6BYx2hfPRCxI2g0wchk6ydbDecLgMwVgugdJZ6+tRf
+P+YV3wEwPcWOvWby3+EmJh0cXUTl6ZMA+eR4pvCi6kf2xJf9dRmEeNNhOuzn9Y0J
+cT9yhBFG4iejKP0iTwET1JKY
+-----END PRIVATE KEY-----
diff --git a/plumbing/transport/http/transport.go b/plumbing/transport/http/transport.go
new file mode 100644
index 0000000..052f3c8
--- /dev/null
+++ b/plumbing/transport/http/transport.go
@@ -0,0 +1,40 @@
+package http
+
+import (
+ "net/http"
+ "net/url"
+)
+
+// transportOptions contains transport specific configuration.
+type transportOptions struct {
+ insecureSkipTLS bool
+ // []byte is not comparable.
+ caBundle string
+ proxyURL url.URL
+}
+
+func (c *client) addTransport(opts transportOptions, transport *http.Transport) {
+ c.m.Lock()
+ c.transports.Add(opts, transport)
+ c.m.Unlock()
+}
+
+func (c *client) removeTransport(opts transportOptions) {
+ c.m.Lock()
+ c.transports.Remove(opts)
+ c.m.Unlock()
+}
+
+func (c *client) fetchTransport(opts transportOptions) (*http.Transport, bool) {
+ c.m.RLock()
+ t, ok := c.transports.Get(opts)
+ c.m.RUnlock()
+ if !ok {
+ return nil, false
+ }
+ transport, ok := t.(*http.Transport)
+ if !ok {
+ return nil, false
+ }
+ return transport, true
+}
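
An illustrative, standard-library-only sketch of the design choice noted in the struct above: map and LRU keys must be comparable, so the CA bundle is stored as a string rather than a []byte. The field names mirror transportOptions but the snippet is independent of go-git.

package main

import "fmt"

// A struct can be used as a map (or LRU) key only if every field is comparable;
// []byte is not, but string is.
type cacheKey struct {
	caBundle        string
	insecureSkipTLS bool
}

func main() {
	cache := map[cacheKey]string{}
	k := cacheKey{caBundle: "-----BEGIN CERTIFICATE-----", insecureSkipTLS: true}
	cache[k] = "cached *http.Transport"
	fmt.Println(cache[k])
}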
diff --git a/plumbing/transport/http/upload_pack.go b/plumbing/transport/http/upload_pack.go
index e735b3d..4f85145 100644
--- a/plumbing/transport/http/upload_pack.go
+++ b/plumbing/transport/http/upload_pack.go
@@ -19,7 +19,7 @@ type upSession struct {
*session
}
-func newUploadPackSession(c *http.Client, ep *transport.Endpoint, auth transport.AuthMethod) (transport.UploadPackSession, error) {
+func newUploadPackSession(c *client, ep *transport.Endpoint, auth transport.AuthMethod) (transport.UploadPackSession, error) {
s, err := newSession(c, ep, auth)
return &upSession{s}, err
}
diff --git a/plumbing/transport/http/upload_pack_test.go b/plumbing/transport/http/upload_pack_test.go
index c088ecc..abb7adf 100644
--- a/plumbing/transport/http/upload_pack_test.go
+++ b/plumbing/transport/http/upload_pack_test.go
@@ -3,7 +3,7 @@ package http
import (
"context"
"fmt"
- "io/ioutil"
+ "io"
"net/url"
"os"
"path/filepath"
@@ -49,7 +49,7 @@ func (s *UploadPackSuite) TestuploadPackRequestToReader(c *C) {
sr, err := uploadPackRequestToReader(r)
c.Assert(err, IsNil)
- b, _ := ioutil.ReadAll(sr)
+ b, _ := io.ReadAll(sr)
c.Assert(string(b), Equals,
"0032want 2b41ef280fdb67a9b250678686a0c3e03b0a9989\n"+
"0032want d82f291cde9987322c8a0c81a325e1ba6159684c\n0000"+
diff --git a/plumbing/transport/internal/common/common.go b/plumbing/transport/internal/common/common.go
index d0e9a29..5fdf425 100644
--- a/plumbing/transport/internal/common/common.go
+++ b/plumbing/transport/internal/common/common.go
@@ -11,7 +11,6 @@ import (
"errors"
"fmt"
"io"
- stdioutil "io/ioutil"
"strings"
"time"
@@ -156,7 +155,7 @@ func (c *client) listenFirstError(r io.Reader) chan string {
close(errLine)
}
- _, _ = io.Copy(stdioutil.Discard, r)
+ _, _ = io.Copy(io.Discard, r)
}()
return errLine
@@ -233,7 +232,7 @@ func (s *session) handleAdvRefDecodeError(err error) error {
// UploadPack performs a request to the server to fetch a packfile. A reader is
// returned with the packfile content. The reader must be closed after reading.
func (s *session) UploadPack(ctx context.Context, req *packp.UploadPackRequest) (*packp.UploadPackResponse, error) {
- if req.IsEmpty() && len(req.Shallows) == 0 {
+ if req.IsEmpty() {
return nil, transport.ErrEmptyUploadPackRequest
}
@@ -374,7 +373,7 @@ func (s *session) checkNotFoundError() error {
case <-t.C:
return ErrTimeoutExceeded
case line, ok := <-s.firstErrLine:
- if !ok {
+ if !ok || len(line) == 0 {
return nil
}
diff --git a/plumbing/transport/internal/common/common_test.go b/plumbing/transport/internal/common/common_test.go
index c60ef3b..affa787 100644
--- a/plumbing/transport/internal/common/common_test.go
+++ b/plumbing/transport/internal/common/common_test.go
@@ -76,3 +76,17 @@ func (s *CommonSuite) TestIsRepoNotFoundErrorForGogsAccessDenied(c *C) {
c.Assert(isRepoNotFound, Equals, true)
}
+
+func (s *CommonSuite) TestCheckNotFoundError(c *C) {
+ firstErrLine := make(chan string, 1)
+
+ session := session{
+ firstErrLine: firstErrLine,
+ }
+
+ firstErrLine <- ""
+
+ err := session.checkNotFoundError()
+
+ c.Assert(err, IsNil)
+}
diff --git a/plumbing/transport/server/server.go b/plumbing/transport/server/server.go
index 8ab70fe..11fa0c8 100644
--- a/plumbing/transport/server/server.go
+++ b/plumbing/transport/server/server.go
@@ -189,7 +189,7 @@ func (s *upSession) objectsToUpload(req *packp.UploadPackRequest) ([]plumbing.Ha
}
func (*upSession) setSupportedCapabilities(c *capability.List) error {
- if err := c.Set(capability.Agent, capability.DefaultAgent); err != nil {
+ if err := c.Set(capability.Agent, capability.DefaultAgent()); err != nil {
return err
}
@@ -355,7 +355,7 @@ func (s *rpSession) reportStatus() *packp.ReportStatus {
}
func (*rpSession) setSupportedCapabilities(c *capability.List) error {
- if err := c.Set(capability.Agent, capability.DefaultAgent); err != nil {
+ if err := c.Set(capability.Agent, capability.DefaultAgent()); err != nil {
return err
}
diff --git a/plumbing/transport/ssh/auth_method.go b/plumbing/transport/ssh/auth_method.go
index 3514669..ac4e358 100644
--- a/plumbing/transport/ssh/auth_method.go
+++ b/plumbing/transport/ssh/auth_method.go
@@ -3,17 +3,15 @@ package ssh
import (
"errors"
"fmt"
- "io/ioutil"
"os"
"os/user"
"path/filepath"
"github.com/go-git/go-git/v5/plumbing/transport"
- "github.com/mitchellh/go-homedir"
+ "github.com/skeema/knownhosts"
sshagent "github.com/xanzy/ssh-agent"
"golang.org/x/crypto/ssh"
- "golang.org/x/crypto/ssh/knownhosts"
)
const DefaultUsername = "git"
@@ -135,7 +133,7 @@ func NewPublicKeys(user string, pemBytes []byte, password string) (*PublicKeys,
// encoded private key. An encryption password should be given if the pemBytes
// contains a password encrypted PEM block otherwise password should be empty.
func NewPublicKeysFromFile(user, pemFile, password string) (*PublicKeys, error) {
- bytes, err := ioutil.ReadFile(pemFile)
+ bytes, err := os.ReadFile(pemFile)
if err != nil {
return nil, err
}
@@ -224,12 +222,19 @@ func (a *PublicKeysCallback) ClientConfig() (*ssh.ClientConfig, error) {
//
// If list of files is empty, then it will be read from the SSH_KNOWN_HOSTS
// environment variable, example:
-// /home/foo/custom_known_hosts_file:/etc/custom_known/hosts_file
+//
+// /home/foo/custom_known_hosts_file:/etc/custom_known/hosts_file
//
// If SSH_KNOWN_HOSTS is not set the following file locations will be used:
-// ~/.ssh/known_hosts
-// /etc/ssh/ssh_known_hosts
+//
+// ~/.ssh/known_hosts
+// /etc/ssh/ssh_known_hosts
func NewKnownHostsCallback(files ...string) (ssh.HostKeyCallback, error) {
+ kh, err := newKnownHosts(files...)
+ return ssh.HostKeyCallback(kh), err
+}
+
+func newKnownHosts(files ...string) (knownhosts.HostKeyCallback, error) {
var err error
if len(files) == 0 {
@@ -251,7 +256,7 @@ func getDefaultKnownHostsFiles() ([]string, error) {
return files, nil
}
- homeDirPath, err := homedir.Dir()
+ homeDirPath, err := os.UserHomeDir()
if err != nil {
return nil, err
}
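
A hedged sketch of wiring the known_hosts-backed callback into key-based auth; the user name and key path are placeholders:

package main

import (
	gitssh "github.com/go-git/go-git/v5/plumbing/transport/ssh"
)

func main() {
	// Load a private key; the password is empty for unencrypted PEM files.
	auth, err := gitssh.NewPublicKeysFromFile("git", "/home/user/.ssh/id_ed25519", "")
	if err != nil {
		panic(err)
	}

	// With no files given, the callback honours SSH_KNOWN_HOSTS and falls back
	// to ~/.ssh/known_hosts and /etc/ssh/ssh_known_hosts.
	cb, err := gitssh.NewKnownHostsCallback()
	if err != nil {
		panic(err)
	}
	auth.HostKeyCallback = cb
	// auth can now be passed as the AuthMethod when opening a session.
}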
diff --git a/plumbing/transport/ssh/common.go b/plumbing/transport/ssh/common.go
index 46e7913..1531603 100644
--- a/plumbing/transport/ssh/common.go
+++ b/plumbing/transport/ssh/common.go
@@ -4,12 +4,14 @@ package ssh
import (
"context"
"fmt"
+ "net"
"reflect"
"strconv"
"strings"
"github.com/go-git/go-git/v5/plumbing/transport"
"github.com/go-git/go-git/v5/plumbing/transport/internal/common"
+ "github.com/skeema/knownhosts"
"github.com/kevinburke/ssh_config"
"golang.org/x/crypto/ssh"
@@ -121,10 +123,24 @@ func (c *command) connect() error {
if err != nil {
return err
}
+ hostWithPort := c.getHostWithPort()
+ if config.HostKeyCallback == nil {
+ kh, err := newKnownHosts()
+ if err != nil {
+ return err
+ }
+ config.HostKeyCallback = kh.HostKeyCallback()
+ config.HostKeyAlgorithms = kh.HostKeyAlgorithms(hostWithPort)
+ } else if len(config.HostKeyAlgorithms) == 0 {
+ // Set the HostKeyAlgorithms based on HostKeyCallback.
+ // For background see https://github.com/go-git/go-git/issues/411 as well as
+ // https://github.com/golang/go/issues/29286 for root cause.
+ config.HostKeyAlgorithms = knownhosts.HostKeyAlgorithms(config.HostKeyCallback, hostWithPort)
+ }
overrideConfig(c.config, config)
- c.client, err = dial("tcp", c.getHostWithPort(), config)
+ c.client, err = dial("tcp", hostWithPort, c.endpoint.Proxy, config)
if err != nil {
return err
}
@@ -139,7 +155,7 @@ func (c *command) connect() error {
return nil
}
-func dial(network, addr string, config *ssh.ClientConfig) (*ssh.Client, error) {
+func dial(network, addr string, proxyOpts transport.ProxyOptions, config *ssh.ClientConfig) (*ssh.Client, error) {
var (
ctx = context.Background()
cancel context.CancelFunc
@@ -151,10 +167,33 @@ func dial(network, addr string, config *ssh.ClientConfig) (*ssh.Client, error) {
}
defer cancel()
- conn, err := proxy.Dial(ctx, network, addr)
+ var conn net.Conn
+ var err error
+
+ if proxyOpts.URL != "" {
+ proxyUrl, err := proxyOpts.FullURL()
+ if err != nil {
+ return nil, err
+ }
+ dialer, err := proxy.FromURL(proxyUrl, proxy.Direct)
+ if err != nil {
+ return nil, err
+ }
+
+		// The dialer must implement proxy.ContextDialer; error out if it does not.
+ ctxDialer, ok := dialer.(proxy.ContextDialer)
+ if !ok {
+ return nil, fmt.Errorf("expected ssh proxy dialer to be of type %s; got %s",
+ reflect.TypeOf(ctxDialer), reflect.TypeOf(dialer))
+ }
+ conn, err = ctxDialer.DialContext(ctx, "tcp", addr)
+ } else {
+ conn, err = proxy.Dial(ctx, network, addr)
+ }
if err != nil {
return nil, err
}
+
c, chans, reqs, err := ssh.NewClientConn(conn, addr, config)
if err != nil {
return nil, err
@@ -173,7 +212,7 @@ func (c *command) getHostWithPort() string {
port = DefaultPort
}
- return fmt.Sprintf("%s:%d", host, port)
+ return net.JoinHostPort(host, strconv.Itoa(port))
}
func (c *command) doGetHostWithPortFromSSHConfig() (addr string, found bool) {
@@ -201,7 +240,7 @@ func (c *command) doGetHostWithPortFromSSHConfig() (addr string, found bool) {
}
}
- addr = fmt.Sprintf("%s:%d", host, port)
+ addr = net.JoinHostPort(host, strconv.Itoa(port))
return
}
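
A sketch of the endpoint-level proxy options that the new dial path consumes; the repository URL, proxy address, and credentials are placeholders:

package main

import (
	"github.com/go-git/go-git/v5/plumbing/transport"
)

func main() {
	ep, err := transport.NewEndpoint("ssh://git@example.com/org/repo.git")
	if err != nil {
		panic(err)
	}

	// When Proxy.URL is set, dial() above resolves it via FullURL() and routes
	// the TCP connection through the resulting SOCKS5 dialer.
	ep.Proxy = transport.ProxyOptions{
		URL:      "socks5://localhost:1080",
		Username: "user",
		Password: "pass",
	}
}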
diff --git a/plumbing/transport/ssh/common_test.go b/plumbing/transport/ssh/common_test.go
index 6d634d5..496e82d 100644
--- a/plumbing/transport/ssh/common_test.go
+++ b/plumbing/transport/ssh/common_test.go
@@ -5,23 +5,25 @@ import (
"github.com/go-git/go-git/v5/plumbing/transport"
+ "github.com/gliderlabs/ssh"
"github.com/kevinburke/ssh_config"
- "golang.org/x/crypto/ssh"
+ stdssh "golang.org/x/crypto/ssh"
+ "golang.org/x/crypto/ssh/testdata"
. "gopkg.in/check.v1"
)
func Test(t *testing.T) { TestingT(t) }
func (s *SuiteCommon) TestOverrideConfig(c *C) {
- config := &ssh.ClientConfig{
+ config := &stdssh.ClientConfig{
User: "foo",
- Auth: []ssh.AuthMethod{
- ssh.Password("yourpassword"),
+ Auth: []stdssh.AuthMethod{
+ stdssh.Password("yourpassword"),
},
- HostKeyCallback: ssh.FixedHostKey(nil),
+ HostKeyCallback: stdssh.FixedHostKey(nil),
}
- target := &ssh.ClientConfig{}
+ target := &stdssh.ClientConfig{}
overrideConfig(config, target)
c.Assert(target.User, Equals, "foo")
@@ -30,11 +32,11 @@ func (s *SuiteCommon) TestOverrideConfig(c *C) {
}
func (s *SuiteCommon) TestOverrideConfigKeep(c *C) {
- config := &ssh.ClientConfig{
+ config := &stdssh.ClientConfig{
User: "foo",
}
- target := &ssh.ClientConfig{
+ target := &stdssh.ClientConfig{
User: "bar",
}
@@ -93,12 +95,69 @@ func (s *SuiteCommon) TestDefaultSSHConfigWildcard(c *C) {
c.Assert(cmd.getHostWithPort(), Equals, "github.com:22")
}
+func (s *SuiteCommon) TestIgnoreHostKeyCallback(c *C) {
+ uploadPack := &UploadPackSuite{
+ opts: []ssh.Option{
+ ssh.HostKeyPEM(testdata.PEMBytes["ed25519"]),
+ },
+ }
+ uploadPack.SetUpSuite(c)
+ // Use the default client, which does not have a host key callback
+ uploadPack.Client = DefaultClient
+ auth, err := NewPublicKeys("foo", testdata.PEMBytes["rsa"], "")
+ c.Assert(err, IsNil)
+ c.Assert(auth, NotNil)
+ auth.HostKeyCallback = stdssh.InsecureIgnoreHostKey()
+ ep := uploadPack.newEndpoint(c, "bar.git")
+ ps, err := uploadPack.Client.NewUploadPackSession(ep, auth)
+ c.Assert(err, IsNil)
+ c.Assert(ps, NotNil)
+}
+
+func (s *SuiteCommon) TestFixedHostKeyCallback(c *C) {
+ hostKey, err := stdssh.ParsePrivateKey(testdata.PEMBytes["ed25519"])
+ c.Assert(err, IsNil)
+ uploadPack := &UploadPackSuite{
+ opts: []ssh.Option{
+ ssh.HostKeyPEM(testdata.PEMBytes["ed25519"]),
+ },
+ }
+ uploadPack.SetUpSuite(c)
+ // Use the default client, which does not have a host key callback
+ uploadPack.Client = DefaultClient
+ auth, err := NewPublicKeys("foo", testdata.PEMBytes["rsa"], "")
+ c.Assert(err, IsNil)
+ c.Assert(auth, NotNil)
+ auth.HostKeyCallback = stdssh.FixedHostKey(hostKey.PublicKey())
+ ep := uploadPack.newEndpoint(c, "bar.git")
+ ps, err := uploadPack.Client.NewUploadPackSession(ep, auth)
+ c.Assert(err, IsNil)
+ c.Assert(ps, NotNil)
+}
+
+func (s *SuiteCommon) TestFailHostKeyCallback(c *C) {
+ uploadPack := &UploadPackSuite{
+ opts: []ssh.Option{
+ ssh.HostKeyPEM(testdata.PEMBytes["ed25519"]),
+ },
+ }
+ uploadPack.SetUpSuite(c)
+ // Use the default client, which does not have a host key callback
+ uploadPack.Client = DefaultClient
+ auth, err := NewPublicKeys("foo", testdata.PEMBytes["rsa"], "")
+ c.Assert(err, IsNil)
+ c.Assert(auth, NotNil)
+ ep := uploadPack.newEndpoint(c, "bar.git")
+ _, err = uploadPack.Client.NewUploadPackSession(ep, auth)
+ c.Assert(err, NotNil)
+}
+
func (s *SuiteCommon) TestIssue70(c *C) {
uploadPack := &UploadPackSuite{}
uploadPack.SetUpSuite(c)
- config := &ssh.ClientConfig{
- HostKeyCallback: ssh.InsecureIgnoreHostKey(),
+ config := &stdssh.ClientConfig{
+ HostKeyCallback: stdssh.InsecureIgnoreHostKey(),
}
r := &runner{
config: config,
diff --git a/plumbing/transport/ssh/internal/test/proxy_test.go b/plumbing/transport/ssh/internal/test/proxy_test.go
new file mode 100644
index 0000000..8e775f8
--- /dev/null
+++ b/plumbing/transport/ssh/internal/test/proxy_test.go
@@ -0,0 +1,112 @@
+package test
+
+import (
+ "context"
+ "fmt"
+ "log"
+ "net"
+ "os"
+ "path/filepath"
+ "sync/atomic"
+ "testing"
+
+ "github.com/armon/go-socks5"
+ "github.com/gliderlabs/ssh"
+ "github.com/go-git/go-git/v5/plumbing/transport"
+ ggssh "github.com/go-git/go-git/v5/plumbing/transport/ssh"
+
+ fixtures "github.com/go-git/go-git-fixtures/v4"
+ stdssh "golang.org/x/crypto/ssh"
+ . "gopkg.in/check.v1"
+)
+
+func Test(t *testing.T) { TestingT(t) }
+
+type ProxyEnvSuite struct {
+ fixtures.Suite
+ port int
+ base string
+}
+
+var _ = Suite(&ProxyEnvSuite{})
+
+var socksProxiedRequests int32
+
+// This test exercises proxy support via an env var, i.e. `ALL_PROXY`.
+// It's located in a separate package because Go caches the value of proxy
+// env vars, leading to misleading/unexpected test results.
+func (s *ProxyEnvSuite) TestCommand(c *C) {
+ socksListener, err := net.Listen("tcp", "localhost:0")
+ c.Assert(err, IsNil)
+
+ socksServer, err := socks5.New(&socks5.Config{
+ Rules: TestProxyRule{},
+ })
+ c.Assert(err, IsNil)
+ go func() {
+ socksServer.Serve(socksListener)
+ }()
+ socksProxyAddr := fmt.Sprintf("socks5://localhost:%d", socksListener.Addr().(*net.TCPAddr).Port)
+ os.Setenv("ALL_PROXY", socksProxyAddr)
+ defer os.Unsetenv("ALL_PROXY")
+
+ sshListener, err := net.Listen("tcp", "localhost:0")
+ c.Assert(err, IsNil)
+ sshServer := &ssh.Server{Handler: HandlerSSH}
+ go func() {
+ log.Fatal(sshServer.Serve(sshListener))
+ }()
+
+ s.port = sshListener.Addr().(*net.TCPAddr).Port
+ s.base, err = os.MkdirTemp(os.TempDir(), fmt.Sprintf("go-git-ssh-%d", s.port))
+ c.Assert(err, IsNil)
+
+ ggssh.DefaultAuthBuilder = func(user string) (ggssh.AuthMethod, error) {
+ return &ggssh.Password{User: user}, nil
+ }
+
+ ep := s.prepareRepository(c, fixtures.Basic().One(), "basic.git")
+ c.Assert(err, IsNil)
+
+ client := ggssh.NewClient(&stdssh.ClientConfig{
+ HostKeyCallback: stdssh.InsecureIgnoreHostKey(),
+ })
+ r, err := client.NewUploadPackSession(ep, nil)
+ c.Assert(err, IsNil)
+ defer func() { c.Assert(r.Close(), IsNil) }()
+
+ info, err := r.AdvertisedReferences()
+ c.Assert(err, IsNil)
+ c.Assert(info, NotNil)
+ proxyUsed := atomic.LoadInt32(&socksProxiedRequests) > 0
+ c.Assert(proxyUsed, Equals, true)
+}
+
+func (s *ProxyEnvSuite) prepareRepository(c *C, f *fixtures.Fixture, name string) *transport.Endpoint {
+ fs := f.DotGit()
+
+ err := fixtures.EnsureIsBare(fs)
+ c.Assert(err, IsNil)
+
+ path := filepath.Join(s.base, name)
+ err = os.Rename(fs.Root(), path)
+ c.Assert(err, IsNil)
+
+ return s.newEndpoint(c, name)
+}
+
+func (s *ProxyEnvSuite) newEndpoint(c *C, name string) *transport.Endpoint {
+ ep, err := transport.NewEndpoint(fmt.Sprintf(
+ "ssh://git@localhost:%d/%s/%s", s.port, filepath.ToSlash(s.base), name,
+ ))
+
+ c.Assert(err, IsNil)
+ return ep
+}
+
+type TestProxyRule struct{}
+
+func (dr TestProxyRule) Allow(ctx context.Context, req *socks5.Request) (context.Context, bool) {
+ atomic.AddInt32(&socksProxiedRequests, 1)
+ return ctx, true
+}
diff --git a/plumbing/transport/ssh/internal/test/test_utils.go b/plumbing/transport/ssh/internal/test/test_utils.go
new file mode 100644
index 0000000..c3797b1
--- /dev/null
+++ b/plumbing/transport/ssh/internal/test/test_utils.go
@@ -0,0 +1,83 @@
+package test
+
+import (
+ "fmt"
+ "io"
+ "os/exec"
+ "runtime"
+ "strings"
+ "sync"
+
+ "github.com/gliderlabs/ssh"
+)
+
+func HandlerSSH(s ssh.Session) {
+ cmd, stdin, stderr, stdout, err := buildCommand(s.Command())
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+
+ if err := cmd.Start(); err != nil {
+ fmt.Println(err)
+ return
+ }
+
+ go func() {
+ defer stdin.Close()
+ io.Copy(stdin, s)
+ }()
+
+ var wg sync.WaitGroup
+ wg.Add(2)
+
+ go func() {
+ defer wg.Done()
+ io.Copy(s.Stderr(), stderr)
+ }()
+
+ go func() {
+ defer wg.Done()
+ io.Copy(s, stdout)
+ }()
+
+ wg.Wait()
+
+ if err := cmd.Wait(); err != nil {
+ return
+ }
+}
+
+func buildCommand(c []string) (cmd *exec.Cmd, stdin io.WriteCloser, stderr, stdout io.ReadCloser, err error) {
+ if len(c) != 2 {
+ err = fmt.Errorf("invalid command")
+ return
+ }
+
+ // fix for Windows environments
+ var path string
+ if runtime.GOOS == "windows" {
+ path = strings.Replace(c[1], "/C:/", "C:/", 1)
+ } else {
+ path = c[1]
+ }
+
+ cmd = exec.Command(c[0], path)
+ stdout, err = cmd.StdoutPipe()
+ if err != nil {
+ return
+ }
+
+ stdin, err = cmd.StdinPipe()
+ if err != nil {
+ return
+ }
+
+ stderr, err = cmd.StderrPipe()
+ if err != nil {
+ return
+ }
+
+ return
+}
diff --git a/plumbing/transport/ssh/proxy_test.go b/plumbing/transport/ssh/proxy_test.go
index 3caf1ff..2ba98e8 100644
--- a/plumbing/transport/ssh/proxy_test.go
+++ b/plumbing/transport/ssh/proxy_test.go
@@ -1,36 +1,87 @@
package ssh
import (
+ "context"
"fmt"
"log"
"net"
"os"
+ "sync/atomic"
"github.com/armon/go-socks5"
+ "github.com/gliderlabs/ssh"
+ "github.com/go-git/go-git/v5/plumbing/transport"
+ "github.com/go-git/go-git/v5/plumbing/transport/ssh/internal/test"
+
+ fixtures "github.com/go-git/go-git-fixtures/v4"
+ stdssh "golang.org/x/crypto/ssh"
. "gopkg.in/check.v1"
)
type ProxySuite struct {
- UploadPackSuite
+ u UploadPackSuite
+ fixtures.Suite
}
var _ = Suite(&ProxySuite{})
-func (s *ProxySuite) SetUpSuite(c *C) {
- s.UploadPackSuite.SetUpSuite(c)
+var socksProxiedRequests int32
- l, err := net.Listen("tcp", "localhost:0")
+func (s *ProxySuite) TestCommand(c *C) {
+ socksListener, err := net.Listen("tcp", "localhost:0")
c.Assert(err, IsNil)
- server, err := socks5.New(&socks5.Config{})
+ socksServer, err := socks5.New(&socks5.Config{
+ AuthMethods: []socks5.Authenticator{socks5.UserPassAuthenticator{
+ Credentials: socks5.StaticCredentials{
+ "user": "pass",
+ },
+ }},
+ Rules: TestProxyRule{},
+ })
c.Assert(err, IsNil)
+ go func() {
+ socksServer.Serve(socksListener)
+ }()
+ socksProxyAddr := fmt.Sprintf("socks5://localhost:%d", socksListener.Addr().(*net.TCPAddr).Port)
- port := l.Addr().(*net.TCPAddr).Port
-
- err = os.Setenv("ALL_PROXY", fmt.Sprintf("socks5://localhost:%d", port))
+ sshListener, err := net.Listen("tcp", "localhost:0")
c.Assert(err, IsNil)
-
+ sshServer := &ssh.Server{Handler: test.HandlerSSH}
go func() {
- log.Fatal(server.Serve(l))
+ log.Fatal(sshServer.Serve(sshListener))
}()
+
+ s.u.port = sshListener.Addr().(*net.TCPAddr).Port
+ s.u.base, err = os.MkdirTemp(os.TempDir(), fmt.Sprintf("go-git-ssh-%d", s.u.port))
+ c.Assert(err, IsNil)
+
+ DefaultAuthBuilder = func(user string) (AuthMethod, error) {
+ return &Password{User: user}, nil
+ }
+
+ ep := s.u.prepareRepository(c, fixtures.Basic().One(), "basic.git")
+ c.Assert(err, IsNil)
+ ep.Proxy = transport.ProxyOptions{
+ URL: socksProxyAddr,
+ Username: "user",
+ Password: "pass",
+ }
+
+ runner := runner{
+ config: &stdssh.ClientConfig{
+ HostKeyCallback: stdssh.InsecureIgnoreHostKey(),
+ },
+ }
+ _, err = runner.Command(transport.UploadPackServiceName, ep, nil)
+ c.Assert(err, IsNil)
+ proxyUsed := atomic.LoadInt32(&socksProxiedRequests) > 0
+ c.Assert(proxyUsed, Equals, true)
+}
+
+type TestProxyRule struct{}
+
+func (dr TestProxyRule) Allow(ctx context.Context, req *socks5.Request) (context.Context, bool) {
+ atomic.AddInt32(&socksProxiedRequests, 1)
+ return ctx, true
}
diff --git a/plumbing/transport/ssh/upload_pack_test.go b/plumbing/transport/ssh/upload_pack_test.go
index e65e04a..67af566 100644
--- a/plumbing/transport/ssh/upload_pack_test.go
+++ b/plumbing/transport/ssh/upload_pack_test.go
@@ -3,7 +3,6 @@ package ssh
import (
"fmt"
"io"
- "io/ioutil"
"log"
"net"
"os"
@@ -14,6 +13,7 @@ import (
"sync"
"github.com/go-git/go-git/v5/plumbing/transport"
+ testutils "github.com/go-git/go-git/v5/plumbing/transport/ssh/internal/test"
"github.com/go-git/go-git/v5/plumbing/transport/test"
"github.com/gliderlabs/ssh"
@@ -25,6 +25,7 @@ import (
type UploadPackSuite struct {
test.UploadPackSuite
fixtures.Suite
+ opts []ssh.Option
port int
base string
@@ -41,7 +42,7 @@ func (s *UploadPackSuite) SetUpSuite(c *C) {
c.Assert(err, IsNil)
s.port = l.Addr().(*net.TCPAddr).Port
- s.base, err = ioutil.TempDir(os.TempDir(), fmt.Sprintf("go-git-ssh-%d", s.port))
+ s.base, err = os.MkdirTemp(os.TempDir(), fmt.Sprintf("go-git-ssh-%d", s.port))
c.Assert(err, IsNil)
DefaultAuthBuilder = func(user string) (AuthMethod, error) {
@@ -56,7 +57,10 @@ func (s *UploadPackSuite) SetUpSuite(c *C) {
s.UploadPackSuite.EmptyEndpoint = s.prepareRepository(c, fixtures.ByTag("empty").One(), "empty.git")
s.UploadPackSuite.NonExistentEndpoint = s.newEndpoint(c, "non-existent.git")
- server := &ssh.Server{Handler: handlerSSH}
+ server := &ssh.Server{Handler: testutils.HandlerSSH}
+ for _, opt := range s.opts {
+ opt(server)
+ }
go func() {
log.Fatal(server.Serve(l))
}()
diff --git a/plumbing/transport/test/receive_pack.go b/plumbing/transport/test/receive_pack.go
index 018d38e..9414fba 100644
--- a/plumbing/transport/test/receive_pack.go
+++ b/plumbing/transport/test/receive_pack.go
@@ -1,13 +1,11 @@
// Package test implements common test suite for different transport
// implementations.
-//
package test
import (
"bytes"
"context"
"io"
- "io/ioutil"
"os"
"path/filepath"
@@ -235,7 +233,7 @@ func (s *ReceivePackSuite) receivePackNoCheck(c *C, ep *transport.Endpoint,
if rootPath != "" && err == nil && stat.IsDir() {
objectPath := filepath.Join(rootPath, "objects/pack")
- files, err := ioutil.ReadDir(objectPath)
+ files, err := os.ReadDir(objectPath)
c.Assert(err, IsNil)
for _, file := range files {
@@ -371,5 +369,5 @@ func (s *ReceivePackSuite) emptyPackfile() io.ReadCloser {
panic(err)
}
- return ioutil.NopCloser(&buf)
+ return io.NopCloser(&buf)
}
diff --git a/plumbing/transport/test/upload_pack.go b/plumbing/transport/test/upload_pack.go
index 3ee029d..f7842eb 100644
--- a/plumbing/transport/test/upload_pack.go
+++ b/plumbing/transport/test/upload_pack.go
@@ -1,13 +1,11 @@
// Package test implements common test suite for different transport
// implementations.
-//
package test
import (
"bytes"
"context"
"io"
- "io/ioutil"
"time"
"github.com/go-git/go-git/v5/plumbing"
@@ -154,7 +152,7 @@ func (s *UploadPackSuite) TestUploadPackWithContextOnRead(c *C) {
cancel()
- _, err = io.Copy(ioutil.Discard, reader)
+ _, err = io.Copy(io.Discard, reader)
c.Assert(err, NotNil)
err = reader.Close()
@@ -255,7 +253,7 @@ func (s *UploadPackSuite) TestFetchError(c *C) {
}
func (s *UploadPackSuite) checkObjectNumber(c *C, r io.Reader, n int) {
- b, err := ioutil.ReadAll(r)
+ b, err := io.ReadAll(r)
c.Assert(err, IsNil)
buf := bytes.NewBuffer(b)
storage := memory.NewStorage()
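
As a side note, the io/ioutil removals across these test files follow the standard Go 1.16+ replacements; a compact sketch of the equivalents used in this change:

package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
)

func main() {
	// ioutil.TempDir -> os.MkdirTemp
	dir, err := os.MkdirTemp("", "go-git-example")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(dir)

	// ioutil.NopCloser -> io.NopCloser, ioutil.ReadAll -> io.ReadAll
	r := io.NopCloser(bytes.NewBufferString("packfile bytes"))
	b, err := io.ReadAll(r)
	if err != nil {
		panic(err)
	}

	// ioutil.Discard -> io.Discard
	_, _ = io.Copy(io.Discard, bytes.NewReader(b))

	fmt.Println(dir, len(b))
}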
diff --git a/references.go b/references.go
deleted file mode 100644
index 6d96035..0000000
--- a/references.go
+++ /dev/null
@@ -1,264 +0,0 @@
-package git
-
-import (
- "io"
- "sort"
-
- "github.com/go-git/go-git/v5/plumbing"
- "github.com/go-git/go-git/v5/plumbing/object"
- "github.com/go-git/go-git/v5/utils/diff"
-
- "github.com/sergi/go-diff/diffmatchpatch"
-)
-
-// References returns a slice of Commits for the file at "path", starting from
-// the commit provided that contains the file from the provided path. The last
-// commit into the returned slice is the commit where the file was created.
-// If the provided commit does not contains the specified path, a nil slice is
-// returned. The commits are sorted in commit order, newer to older.
-//
-// Caveats:
-//
-// - Moves and copies are not currently supported.
-//
-// - Cherry-picks are not detected unless there are no commits between them and
-// therefore can appear repeated in the list. (see git path-id for hints on how
-// to fix this).
-func references(c *object.Commit, path string) ([]*object.Commit, error) {
- var result []*object.Commit
- seen := make(map[plumbing.Hash]struct{})
- if err := walkGraph(&result, &seen, c, path); err != nil {
- return nil, err
- }
-
- // TODO result should be returned without ordering
- sortCommits(result)
-
- // for merges of identical cherry-picks
- return removeComp(path, result, equivalent)
-}
-
-type commitSorterer struct {
- l []*object.Commit
-}
-
-func (s commitSorterer) Len() int {
- return len(s.l)
-}
-
-func (s commitSorterer) Less(i, j int) bool {
- return s.l[i].Committer.When.Before(s.l[j].Committer.When) ||
- s.l[i].Committer.When.Equal(s.l[j].Committer.When) &&
- s.l[i].Author.When.Before(s.l[j].Author.When)
-}
-
-func (s commitSorterer) Swap(i, j int) {
- s.l[i], s.l[j] = s.l[j], s.l[i]
-}
-
-// SortCommits sorts a commit list by commit date, from older to newer.
-func sortCommits(l []*object.Commit) {
- s := &commitSorterer{l}
- sort.Sort(s)
-}
-
-// Recursive traversal of the commit graph, generating a linear history of the
-// path.
-func walkGraph(result *[]*object.Commit, seen *map[plumbing.Hash]struct{}, current *object.Commit, path string) error {
- // check and update seen
- if _, ok := (*seen)[current.Hash]; ok {
- return nil
- }
- (*seen)[current.Hash] = struct{}{}
-
- // if the path is not in the current commit, stop searching.
- if _, err := current.File(path); err != nil {
- return nil
- }
-
- // optimization: don't traverse branches that does not
- // contain the path.
- parents, err := parentsContainingPath(path, current)
- if err != nil {
- return err
- }
- switch len(parents) {
- // if the path is not found in any of its parents, the path was
- // created by this commit; we must add it to the revisions list and
- // stop searching. This includes the case when current is the
- // initial commit.
- case 0:
- *result = append(*result, current)
- return nil
- case 1: // only one parent contains the path
- // if the file contents has change, add the current commit
- different, err := differentContents(path, current, parents)
- if err != nil {
- return err
- }
- if len(different) == 1 {
- *result = append(*result, current)
- }
- // in any case, walk the parent
- return walkGraph(result, seen, parents[0], path)
- default: // more than one parent contains the path
- // TODO: detect merges that had a conflict, because they must be
- // included in the result here.
- for _, p := range parents {
- err := walkGraph(result, seen, p, path)
- if err != nil {
- return err
- }
- }
- }
- return nil
-}
-
-func parentsContainingPath(path string, c *object.Commit) ([]*object.Commit, error) {
- // TODO: benchmark this method making git.object.Commit.parent public instead of using
- // an iterator
- var result []*object.Commit
- iter := c.Parents()
- for {
- parent, err := iter.Next()
- if err == io.EOF {
- return result, nil
- }
- if err != nil {
- return nil, err
- }
- if _, err := parent.File(path); err == nil {
- result = append(result, parent)
- }
- }
-}
-
-// Returns an slice of the commits in "cs" that has the file "path", but with different
-// contents than what can be found in "c".
-func differentContents(path string, c *object.Commit, cs []*object.Commit) ([]*object.Commit, error) {
- result := make([]*object.Commit, 0, len(cs))
- h, found := blobHash(path, c)
- if !found {
- return nil, object.ErrFileNotFound
- }
- for _, cx := range cs {
- if hx, found := blobHash(path, cx); found && h != hx {
- result = append(result, cx)
- }
- }
- return result, nil
-}
-
-// blobHash returns the hash of a path in a commit
-func blobHash(path string, commit *object.Commit) (hash plumbing.Hash, found bool) {
- file, err := commit.File(path)
- if err != nil {
- var empty plumbing.Hash
- return empty, found
- }
- return file.Hash, true
-}
-
-type contentsComparatorFn func(path string, a, b *object.Commit) (bool, error)
-
-// Returns a new slice of commits, with duplicates removed. Expects a
-// sorted commit list. Duplication is defined according to "comp". It
-// will always keep the first commit of a series of duplicated commits.
-func removeComp(path string, cs []*object.Commit, comp contentsComparatorFn) ([]*object.Commit, error) {
- result := make([]*object.Commit, 0, len(cs))
- if len(cs) == 0 {
- return result, nil
- }
- result = append(result, cs[0])
- for i := 1; i < len(cs); i++ {
- equals, err := comp(path, cs[i], cs[i-1])
- if err != nil {
- return nil, err
- }
- if !equals {
- result = append(result, cs[i])
- }
- }
- return result, nil
-}
-
-// Equivalent commits are commits whose patch is the same.
-func equivalent(path string, a, b *object.Commit) (bool, error) {
- numParentsA := a.NumParents()
- numParentsB := b.NumParents()
-
- // the first commit is not equivalent to anyone
- // and "I think" merges can not be equivalent to anything
- if numParentsA != 1 || numParentsB != 1 {
- return false, nil
- }
-
- diffsA, err := patch(a, path)
- if err != nil {
- return false, err
- }
- diffsB, err := patch(b, path)
- if err != nil {
- return false, err
- }
-
- return sameDiffs(diffsA, diffsB), nil
-}
-
-func patch(c *object.Commit, path string) ([]diffmatchpatch.Diff, error) {
- // get contents of the file in the commit
- file, err := c.File(path)
- if err != nil {
- return nil, err
- }
- content, err := file.Contents()
- if err != nil {
- return nil, err
- }
-
- // get contents of the file in the first parent of the commit
- var contentParent string
- iter := c.Parents()
- parent, err := iter.Next()
- if err != nil {
- return nil, err
- }
- file, err = parent.File(path)
- if err != nil {
- contentParent = ""
- } else {
- contentParent, err = file.Contents()
- if err != nil {
- return nil, err
- }
- }
-
- // compare the contents of parent and child
- return diff.Do(content, contentParent), nil
-}
-
-func sameDiffs(a, b []diffmatchpatch.Diff) bool {
- if len(a) != len(b) {
- return false
- }
- for i := range a {
- if !sameDiff(a[i], b[i]) {
- return false
- }
- }
- return true
-}
-
-func sameDiff(a, b diffmatchpatch.Diff) bool {
- if a.Type != b.Type {
- return false
- }
- switch a.Type {
- case 0:
- return countLines(a.Text) == countLines(b.Text)
- case 1, -1:
- return a.Text == b.Text
- default:
- panic("unreachable")
- }
-}
diff --git a/references_test.go b/references_test.go
deleted file mode 100644
index 28d1bb9..0000000
--- a/references_test.go
+++ /dev/null
@@ -1,401 +0,0 @@
-package git
-
-import (
- "bytes"
- "fmt"
-
- "github.com/go-git/go-git/v5/plumbing"
- "github.com/go-git/go-git/v5/plumbing/object"
- "github.com/go-git/go-git/v5/storage/memory"
-
- fixtures "github.com/go-git/go-git-fixtures/v4"
- . "gopkg.in/check.v1"
-)
-
-type ReferencesSuite struct {
- BaseSuite
-}
-
-var _ = Suite(&ReferencesSuite{})
-
-var referencesTests = [...]struct {
- // input data to revlist
- repo string
- commit string
- path string
- // expected output data form the revlist
- revs []string
-}{
- // Tyba git-fixture
- {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "binary.jpg", []string{
- "35e85108805c84807bc66a02d91535e1e24b38b9",
- }},
- {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "CHANGELOG", []string{
- "b8e471f58bcbca63b07bda20e428190409c2db47",
- }},
- {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "go/example.go", []string{
- "918c48b83bd081e863dbe1b80f8998f058cd8294",
- }},
- {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "json/long.json", []string{
- "af2d6a6954d532f8ffb47615169c8fdf9d383a1a",
- }},
- {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "json/short.json", []string{
- "af2d6a6954d532f8ffb47615169c8fdf9d383a1a",
- }},
- {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "LICENSE", []string{
- "b029517f6300c2da0f4b651b8642506cd6aaf45d",
- }},
- {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "php/crappy.php", []string{
- "918c48b83bd081e863dbe1b80f8998f058cd8294",
- }},
- {"https://github.com/git-fixtures/basic.git", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "vendor/foo.go", []string{
- "6ecf0ef2c2dffb796033e5a02219af86ec6584e5",
- }},
- {"https://github.com/jamesob/desk.git", "d4edaf0e8101fcea437ebd982d899fe2cc0f9f7b", "LICENSE", []string{
- "ffcda27c2de6768ee83f3f4a027fa4ab57d50f09",
- }},
- {"https://github.com/jamesob/desk.git", "d4edaf0e8101fcea437ebd982d899fe2cc0f9f7b", "README.md", []string{
- "ffcda27c2de6768ee83f3f4a027fa4ab57d50f09",
- "2e87a2dcc63a115f9a61bd969d1e85fb132a431b",
- "215b0ac06225b0671bc3460d10da88c3406f796f",
- "0260eb7a2623dd2309ab439f74e8681fccdc4285",
- "d46b48933e94f30992486374fa9a6becfd28ea17",
- "9cb4df2a88efee8836f9b8ad27ca2717f624164e",
- "8c49acdec2ed441706d8799f8b17878aae4c1ffe",
- "ebaca0c6f54c23193ee8175c3530e370cb2dabe3",
- "77675f82039551a19de4fbccbe69366fe63680df",
- "b9741594fb8ab7374f9be07d6a09a3bf96719816",
- "04db6acd94de714ca48128c606b17ee1149a630e",
- "ff737bd8a962a714a446d7592fae423a56e61e12",
- "eadd03f7a1cc54810bd10eef6747ad9562ad246d",
- "b5072ab5c1cf89191d71f1244eecc5d1f369ef7e",
- "bfa6ebc9948f1939402b063c0a2a24bf2b1c1cc3",
- "d9aef39828c670dfdb172502021a2ebcda8cf2fb",
- "1a6b6e45c91e1831494eb139ee3f8e21649c7fb0",
- "09fdbe4612066cf63ea46aee43c7cfaaff02ecfb",
- "236f6526b1150cc1f1723566b4738f443fc70777",
- "7862953f470b62397d22f6782a884f5bea6d760d",
- "b0b0152d08c2333680266977a5bc9c4e50e1e968",
- "13ce6c1c77c831f381974aa1c62008a414bd2b37",
- "d3f3c8faca048d11709969fbfc0cdf2901b87578",
- "8777dde1abe18c805d021366643218d3f3356dd9",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "pylib/spinnaker/reconfigure_spinnaker.py", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "pylib/spinnaker/validate_configuration.py", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- "1e14f94bcf82694fdc7e2dcbbfdbbed58db0f4d9",
- "1e3d328a2cabda5d0aaddc5dec65271343e0dc37",
- "b5d999e2986e190d81767cd3cfeda0260f9f6fb8",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "pylib/spinnaker/fetch.py", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "pylib/spinnaker/yaml_util.py", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- "1e14f94bcf82694fdc7e2dcbbfdbbed58db0f4d9",
- "b5d999e2986e190d81767cd3cfeda0260f9f6fb8",
- "023d4fb17b76e0fe0764971df8b8538b735a1d67",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "dev/build_release.py", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- "1e14f94bcf82694fdc7e2dcbbfdbbed58db0f4d9",
- "f42771ba298b93a7c4f5b16c5b30ab96c15305a8",
- "dd52703a50e71891f63fcf05df1f69836f4e7056",
- "0d9c9cef53af38cefcb6801bb492aaed3f2c9a42",
- "d375f1994ff4d0bdc32d614e698f1b50e1093f14",
- "abad497f11a366548aa95303c8c2f165fe7ae918",
- "6986d885626792dee4ef6b7474dfc9230c5bda54",
- "5422a86a10a8c5a1ef6728f5fc8894d9a4c54cb9",
- "09a4ea729b25714b6368959eea5113c99938f7b6",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "pkg_scripts/postUninstall.sh", []string{
- "ce9f123d790717599aaeb76bc62510de437761be",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "install/first_google_boot.sh", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- "de25f576b888569192e6442b0202d30ca7b2d8ec",
- "a596972a661d9a7deca8abd18b52ce1a39516e89",
- "9467ec579708b3c71dd9e5b3906772841c144a30",
- "c4a9091e4076cb740fa46e790dd5b658e19012ad",
- "6eb5d9c5225224bfe59c401182a2939d6c27fc00",
- "495c7118e7cf757aa04eab410b64bfb5b5149ad2",
- "dd2d03c19658ff96d371aef00e75e2e54702da0e",
- "2a3b1d3b134e937c7bafdab6cc2950e264bf5dee",
- "a57b08a9072f6a865f760551be2a4944f72f804a",
- "0777fadf4ca6f458d7071de414f9bd5417911037",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "install/install_spinnaker.sh", []string{
- "0d9c9cef53af38cefcb6801bb492aaed3f2c9a42",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "install/install_fake_openjdk8.sh", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "install/install_spinnaker.py", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- "37f94770d81232b1895fca447878f68d65aac652",
- "46c9dcbb55ca3f4735e82ad006e8cae2fdd050d9",
- "124a88cfda413cb7182ca9c739a284a9e50042a1",
- "eb4faf67a8b775d7985d07a708e3ffeac4273580",
- "0d9c9cef53af38cefcb6801bb492aaed3f2c9a42",
- "01171a8a2e843bef3a574ba73b258ac29e5d5405",
- "739d8c6fe16edcb6ef9185dc74197de561b84315",
- "d33c2d1e350b03fb989eefc612e8c9d5fa7cadc2",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "install/__init__.py", []string{
- "a24001f6938d425d0e7504bdf5d27fc866a85c3d",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "experimental/docker-compose/docker-compose.yml", []string{
- "fda357835d889595dc39dfebc6181d863cce7d4f",
- "57c59e7144354a76e1beba69ae2f85db6b1727af",
- "7682dff881029c722d893a112a64fea6849a0428",
- "66f1c938c380a4096674b27540086656076a597f",
- "56dc238f6f397e93f1d1aad702976889c830e8bf",
- "b95e442c064935709e789fa02126f17ddceef10b",
- "f98965a8f42037bd038b86c3401da7e6dfbf4f2e",
- "5344429749e8b68b168d2707b7903692436cc2ea",
- "6a31f5d219766b0cec4ea4fbbbfe47bdcdb0ab8e",
- "ddaae195b628150233b0a48f50a1674fd9d1a924",
- "7119ad9cf7d4e4d8b059e5337374baae4adc7458",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "unittest/validate_configuration_test.py", []string{
- "1e14f94bcf82694fdc7e2dcbbfdbbed58db0f4d9",
- "1e3d328a2cabda5d0aaddc5dec65271343e0dc37",
- }},
- {"https://github.com/spinnaker/spinnaker.git", "f39d86f59a0781f130e8de6b2115329c1fbe9545", "README.adoc", []string{
- "638f61b3331695f46f1a88095e26dea0f09f176b",
- "bd42370d3fe8d410e78acb96f81cb3d838ad1c21",
- "d6905eab6fec1841c7cf8e4484499f5c8d7d423e",
- "c0a70a0f5aa494f0ae01c55ba191f2325556489a",
- "811795c8a185e88f5d269195cb68b29c8d0fe170",
- "d6e6fe0194447cc280f942d6a2e0521b68ea7796",
- "174bdbf9edfb0ca88415dd4a673852d5b22e7036",
- "9944d6cf72b8f82d622d85dad7434472bc8f397d",
- "e805183c72f0426fb073728c01901c2fd2db1da6",
- "8ef83dd443a05e9122681950399edaa58a38d466",
- "d73f9cee49a5ad27a42a6e18af7c49a8f28ad8a8",
- }},
- // FAILS
- /*
- // this contains an empty move
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "google/dev/build_google_tarball.py", []string{
- "88e60ac93f832efc2616b3c165e99a8f2ffc3e0c",
- "9e49443da49b8c862cc140b660744f84eebcfa51",
- }},
- */
- /*
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "unittest/yaml_util_test.py", []string{
- "edf909edb9319c5e615e4ce73da47bbdca388ebe",
- "023d4fb17b76e0fe0764971df8b8538b735a1d67",
- }},
- */
- /*
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "unittest/configurator_test.py", []string{
- "1e14f94bcf82694fdc7e2dcbbfdbbed58db0f4d9",
- "edf909edb9319c5e615e4ce73da47bbdca388ebe",
- "d14f793a6cd7169ef708a4fc276ad876bd3edd4e",
- "023d4fb17b76e0fe0764971df8b8538b735a1d67",
- }},
- */
- /*
- // this contains a cherry-pick at 094d0e7d5d691 (with 3f34438d)
- {"https://github.com/jamesob/desk.git", "d4edaf0e8101fcea437ebd982d899fe2cc0f9f7b", "desk", []string{
- "ffcda27c2de6768ee83f3f4a027fa4ab57d50f09",
- "a0c1e853158ccbaf95574220bbf3b54509034a9f",
- "decfc524570c407d6bba0f217e534c8b47dbdbee",
- "1413872d5b3af7cd674bbe0e1f23387cd5d940e6",
- "40cd5a91d916e7b2f331e4e85fdc52636fd7cff7",
- "8e07d73aa0e3780f8c7cf8ad1a6b263df26a0a52",
- "19c56f95720ac3630efe9f29b1a252581d6cbc0c",
- "9ea46ccc6d253cffb4b7b66e936987d87de136e4",
- "094d0e7d5d69141c98a606910ba64786c5565da0",
- "801e62706a9e4fef75fcaca9c78744de0bc36e6a",
- "eddf335f31c73624ed3f40dc5fcad50136074b2b",
- "c659093f06eb2bd68c6252caeab605e5cd8aa49e",
- "d94b3fe8ce0e3a474874d742992d432cd040582f",
- "93cddf036df2d8509f910063696acd556ca7600f",
- "b3d4cb0c826b16b301f088581d681654d8de6c07",
- "52d90f9b513dd3c5330663cba39396e6b8a3ba4e",
- "15919e99ded03c6ceea9ff98558e77a322a4dadb",
- "803bf37847633e2f685a46a27b11facf22efebec",
- "c07ad524ee1e616c70bf2ea7a0ee4f4a01195d78",
- "b91aff30f318fda461d009c308490613b394f3e2",
- "67cec1e8a3f21c6eb11678e3f31ffd228b55b783",
- "bbe404c78af7525fabc57b9e7aa7c100b0d39f7a",
- "5dd078848786c2babc2511e9502fa98518cf3535",
- "7970ae7cc165c5205945dfb704d67d53031f550a",
- "33091ac904747747ff30f107d4d0f22fa872eccf",
- "069f81cab12d185ba1b509be946c47897cd4fb1f",
- "13ce6c1c77c831f381974aa1c62008a414bd2b37",
- }},
- */
- /*
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "InstallSpinnaker.sh", []string{
- "ce9f123d790717599aaeb76bc62510de437761be",
- "23673af3ad70b50bba7fdafadc2323302f5ba520",
- "b7015a5d36990d69a054482556127b9c7404a24a",
- "582da9622e3a72a19cd261a017276d72b5b0051a",
- "0c5bb1e4392e751f884f3c57de5d4aee72c40031",
- "c9c2a0ec03968ab17e8b16fdec9661eb1dbea173",
- "a3cdf880826b4d9af42b93f4a2df29a91ab31d35",
- "18526c447f5174d33c96aac6d6433318b0e2021c",
- "2a6288be1c8ea160c443ca3cd0fe826ff2387d37",
- "9e74d009894d73dd07773ea6b3bdd8323db980f7",
- "d2f6214b625db706384b378a29cc4c22237db97a",
- "202a9c720b3ba8106e022a0ad027ebe279040c78",
- "791bcd1592828d9d5d16e83f3a825fb08b0ba22d",
- "01e65d67eed8afcb67a6bdf1c962541f62b299c9",
- "6328ee836affafc1b52127147b5ca07300ac78e6",
- "3de4f77c105f700f50d9549d32b9a05a01b46c4b",
- "8980daf661408a3faa1f22c225702a5c1d11d5c9",
- "8eb116de9128c314ac8a6f5310ca500b8c74f5db",
- "88e841aad37b71b78a8fb88bc75fe69499d527c7",
- "370d61cdbc1f3c90db6759f1599ccbabd40ad6c1",
- "505577dc87d300cf562dc4702a05a5615d90d855",
- "b5c6053a46993b20d1b91e7b7206bffa54669ad7",
- "ba486de7c025457963701114c683dcd4708e1dee",
- "b41d7c0e5b20bbe7c8eb6606731a3ff68f4e3941",
- "a47d0aaeda421f06df248ad65bd58230766bf118",
- "495c7118e7cf757aa04eab410b64bfb5b5149ad2",
- "46670eb6477c353d837dbaba3cf36c5f8b86f037",
- "dd2d03c19658ff96d371aef00e75e2e54702da0e",
- "4bbcad219ec55a465fb48ce236cb10ca52d43b1f",
- "50d0556563599366f29cb286525780004fa5a317",
- "9a06d3f20eabb254d0a1e2ff7735ef007ccd595e",
- "d4b48a39aba7d3bd3e8abef2274a95b112d1ae73",
- }},
- */
- /*
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "config/default-spinnaker-local.yml", []string{
- "ae904e8d60228c21c47368f6a10f1cc9ca3aeebf",
- "99534ecc895fe17a1d562bb3049d4168a04d0865",
- "caf6d62e8285d4681514dd8027356fb019bc97ff",
- "eaf7614cad81e8ab5c813dd4821129d0c04ea449",
- "5a2a845bc08974a36d599a4a4b7e25be833823b0",
- "41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c",
- "974b775a8978b120ff710cac93a21c7387b914c9",
- "87e459a9a044b3109dfeb943cc82c627b61d84a6",
- "5e09821cbd7d710405b61cab0a795c2982a71b9c",
- "8cc2d4bdb0a15aafc7fe02cdcb03ab90c974cafa",
- "3ce7b902a51bac2f10994f7d1f251b616c975e54",
- "a596972a661d9a7deca8abd18b52ce1a39516e89",
- "8980daf661408a3faa1f22c225702a5c1d11d5c9",
- }},
- */
- /*
- {"https://github.com/spinnaker/spinnaker.git", "b32b2aecae2cfca4840dd480f8082da206a538da", "config/spinnaker.yml", []string{
- "ae904e8d60228c21c47368f6a10f1cc9ca3aeebf",
- "caf6d62e8285d4681514dd8027356fb019bc97ff",
- "eaf7614cad81e8ab5c813dd4821129d0c04ea449",
- "5a2a845bc08974a36d599a4a4b7e25be833823b0",
- "41e96c54a478e5d09dd07ed7feb2d8d08d8c7e3c",
- "974b775a8978b120ff710cac93a21c7387b914c9",
- "ed887f6547d7cd2b2d741184a06f97a0a704152b",
- "d4553dac205023fa77652308af1a2d1cf52138fb",
- "a596972a661d9a7deca8abd18b52ce1a39516e89",
- "66ac94f0b4442707fb6f695fbed91d62b3bd9d4a",
- "079e42e7c979541b6fab7343838f7b9fd4a360cd",
- }},
- */
-}
-
-func (s *ReferencesSuite) TestObjectNotFoundError(c *C) {
- h1 := plumbing.NewHash("af2d6a6954d532f8ffb47615169c8fdf9d383a1a")
- hParent := plumbing.NewHash("1669dce138d9b841a518c64b10914d88f5e488ea")
-
- url := fixtures.ByURL("https://github.com/git-fixtures/basic.git").One().DotGit().Root()
- storer := memory.NewStorage()
- r, err := Clone(storer, nil, &CloneOptions{
- URL: url,
- })
- c.Assert(err, IsNil)
-
- delete(storer.Objects, hParent)
-
- commit, err := r.CommitObject(h1)
- c.Assert(err, IsNil)
-
- _, err = references(commit, "LICENSE")
- c.Assert(err, Equals, plumbing.ErrObjectNotFound)
-}
-
-func (s *ReferencesSuite) TestRevList(c *C) {
- for _, t := range referencesTests {
- r := s.NewRepositoryFromPackfile(fixtures.ByURL(t.repo).One())
-
- commit, err := r.CommitObject(plumbing.NewHash(t.commit))
- c.Assert(err, IsNil)
-
- revs, err := references(commit, t.path)
- c.Assert(err, IsNil)
- c.Assert(len(revs), Equals, len(t.revs))
-
- for i := range revs {
- if revs[i].Hash.String() != t.revs[i] {
- commit, err := s.Repository.CommitObject(plumbing.NewHash(t.revs[i]))
- c.Assert(err, IsNil)
- equiv, err := equivalent(t.path, revs[i], commit)
- c.Assert(err, IsNil)
- if equiv {
- fmt.Printf("cherry-pick detected: %s %s\n", revs[i].Hash.String(), t.revs[i])
- } else {
- c.Fatalf("\nrepo=%s, commit=%s, path=%s, \n%s",
- t.repo, t.commit, t.path, compareSideBySide(t.revs, revs))
- }
- }
- }
- }
-}
-
-// same length is assumed
-func compareSideBySide(a []string, b []*object.Commit) string {
- var buf bytes.Buffer
- buf.WriteString("\t EXPECTED OBTAINED ")
- var sep string
- var obt string
- for i := range a {
- obt = b[i].Hash.String()
- if a[i] != obt {
- sep = "------"
- } else {
- sep = " "
- }
- buf.WriteString(fmt.Sprintf("\n%d", i+1))
- buf.WriteString(sep)
- buf.WriteString(a[i])
- buf.WriteString(sep)
- buf.WriteString(obt)
- }
- return buf.String()
-}
-
-var cherryPicks = [...][]string{
- // repo, path, commit a, commit b
- {"https://github.com/jamesob/desk.git", "desk", "094d0e7d5d69141c98a606910ba64786c5565da0", "3f34438d54f4a1ca86db8c0f03ed8eb38f20e22c"},
-}
-
-// should detect cherry picks
-func (s *ReferencesSuite) TestEquivalent(c *C) {
- for _, t := range cherryPicks {
- cs := s.commits(c, t[0], t[2], t[3])
- equiv, err := equivalent(t[1], cs[0], cs[1])
- c.Assert(err, IsNil)
- c.Assert(equiv, Equals, true, Commentf("repo=%s, file=%s, a=%s b=%s", t[0], t[1], t[2], t[3]))
- }
-}
-
-// returns the commits from a slice of hashes
-func (s *ReferencesSuite) commits(c *C, repo string, hs ...string) []*object.Commit {
- r := s.NewRepositoryFromPackfile(fixtures.ByURL(repo).One())
-
- result := make([]*object.Commit, 0, len(hs))
- for _, h := range hs {
- commit, err := r.CommitObject(plumbing.NewHash(h))
- c.Assert(err, IsNil)
-
- result = append(result, commit)
- }
-
- return result
-}
diff --git a/remote.go b/remote.go
index db78ae7..679e0af 100644
--- a/remote.go
+++ b/remote.go
@@ -33,6 +33,7 @@ var (
ErrDeleteRefNotSupported = errors.New("server does not support delete-refs")
ErrForceNeeded = errors.New("some refs were not updated")
ErrExactSHA1NotSupported = errors.New("server does not support exact SHA1 refspec")
+ ErrEmptyUrls = errors.New("URLs cannot be empty")
)
type NoMatchingRefSpecError struct {
@@ -54,6 +55,9 @@ const (
// repo containing this remote, when not using the multi-ack
// protocol. Setting this to 0 means there is no limit.
maxHavesToVisitPerRef = 100
+
+ // peeledSuffix is the suffix used to build peeled reference names.
+ peeledSuffix = "^{}"
)
// Remote represents a connection to a remote repository.
@@ -109,7 +113,7 @@ func (r *Remote) PushContext(ctx context.Context, o *PushOptions) (err error) {
o.RemoteURL = r.c.URLs[0]
}
- s, err := newSendPackSession(o.RemoteURL, o.Auth, o.InsecureSkipTLS, o.CABundle)
+ s, err := newSendPackSession(o.RemoteURL, o.Auth, o.InsecureSkipTLS, o.CABundle, o.ProxyOptions)
if err != nil {
return err
}
@@ -220,11 +224,13 @@ func (r *Remote) PushContext(ctx context.Context, o *PushOptions) (err error) {
return err
}
- if err = rs.Error(); err != nil {
- return err
+ if rs != nil {
+ if err = rs.Error(); err != nil {
+ return err
+ }
}
- return r.updateRemoteReferenceStorage(req, rs)
+ return r.updateRemoteReferenceStorage(req)
}
func (r *Remote) useRefDeltas(ar *packp.AdvRefs) bool {
@@ -343,7 +349,6 @@ func (r *Remote) newReferenceUpdateRequest(
func (r *Remote) updateRemoteReferenceStorage(
req *packp.ReferenceUpdateRequest,
- result *packp.ReportStatus,
) error {
for _, spec := range r.c.Fetch {
@@ -410,7 +415,7 @@ func (r *Remote) fetch(ctx context.Context, o *FetchOptions) (sto storer.Referen
o.RemoteURL = r.c.URLs[0]
}
- s, err := newUploadPackSession(o.RemoteURL, o.Auth, o.InsecureSkipTLS, o.CABundle)
+ s, err := newUploadPackSession(o.RemoteURL, o.Auth, o.InsecureSkipTLS, o.CABundle, o.ProxyOptions)
if err != nil {
return nil, err
}
@@ -441,7 +446,7 @@ func (r *Remote) fetch(ctx context.Context, o *FetchOptions) (sto storer.Referen
return nil, err
}
- refs, err := calculateRefs(o.RefSpecs, remoteRefs, o.Tags)
+ refs, specToRefs, err := calculateRefs(o.RefSpecs, remoteRefs, o.Tags)
if err != nil {
return nil, err
}
@@ -453,9 +458,9 @@ func (r *Remote) fetch(ctx context.Context, o *FetchOptions) (sto storer.Referen
}
}
- req.Wants, err = getWants(r.s, refs)
+ req.Wants, err = getWants(r.s, refs, o.Depth)
if len(req.Wants) > 0 {
- req.Haves, err = getHaves(localRefs, remoteRefs, r.s)
+ req.Haves, err = getHaves(localRefs, remoteRefs, r.s, o.Depth)
if err != nil {
return nil, err
}
@@ -465,7 +470,7 @@ func (r *Remote) fetch(ctx context.Context, o *FetchOptions) (sto storer.Referen
}
}
- updated, err := r.updateLocalReferenceStorage(o.RefSpecs, refs, remoteRefs, o.Tags, o.Force)
+ updated, err := r.updateLocalReferenceStorage(o.RefSpecs, refs, remoteRefs, specToRefs, o.Tags, o.Force)
if err != nil {
return nil, err
}
@@ -507,8 +512,8 @@ func depthChanged(before []plumbing.Hash, s storage.Storer) (bool, error) {
return false, nil
}
-func newUploadPackSession(url string, auth transport.AuthMethod, insecure bool, cabundle []byte) (transport.UploadPackSession, error) {
- c, ep, err := newClient(url, auth, insecure, cabundle)
+func newUploadPackSession(url string, auth transport.AuthMethod, insecure bool, cabundle []byte, proxyOpts transport.ProxyOptions) (transport.UploadPackSession, error) {
+ c, ep, err := newClient(url, insecure, cabundle, proxyOpts)
if err != nil {
return nil, err
}
@@ -516,8 +521,8 @@ func newUploadPackSession(url string, auth transport.AuthMethod, insecure bool,
return c.NewUploadPackSession(ep, auth)
}
-func newSendPackSession(url string, auth transport.AuthMethod, insecure bool, cabundle []byte) (transport.ReceivePackSession, error) {
- c, ep, err := newClient(url, auth, insecure, cabundle)
+func newSendPackSession(url string, auth transport.AuthMethod, insecure bool, cabundle []byte, proxyOpts transport.ProxyOptions) (transport.ReceivePackSession, error) {
+ c, ep, err := newClient(url, insecure, cabundle, proxyOpts)
if err != nil {
return nil, err
}
@@ -525,13 +530,14 @@ func newSendPackSession(url string, auth transport.AuthMethod, insecure bool, ca
return c.NewReceivePackSession(ep, auth)
}
-func newClient(url string, auth transport.AuthMethod, insecure bool, cabundle []byte) (transport.Transport, *transport.Endpoint, error) {
+func newClient(url string, insecure bool, cabundle []byte, proxyOpts transport.ProxyOptions) (transport.Transport, *transport.Endpoint, error) {
ep, err := transport.NewEndpoint(url)
if err != nil {
return nil, nil, err
}
ep.InsecureSkipTLS = insecure
ep.CaBundle = cabundle
+ ep.Proxy = proxyOpts
c, err := client.NewClient(ep)
if err != nil {
@@ -832,6 +838,7 @@ func getHavesFromRef(
remoteRefs map[plumbing.Hash]bool,
s storage.Storer,
haves map[plumbing.Hash]bool,
+ depth int,
) error {
h := ref.Hash()
if haves[h] {
@@ -857,7 +864,13 @@ func getHavesFromRef(
// commits from the history of each ref.
walker := object.NewCommitPreorderIter(commit, haves, nil)
toVisit := maxHavesToVisitPerRef
- return walker.ForEach(func(c *object.Commit) error {
+ // But we only need to visit up to the requested depth
+ if depth > 0 && depth < maxHavesToVisitPerRef {
+ toVisit = depth
+ }
+ // It is safe to ignore any error here, as we are only trying to find the references we already have.
+ // A legitimate failure is, for example, a shallow clone that is missing the previous commit(s).
+ _ = walker.ForEach(func(c *object.Commit) error {
haves[c.Hash] = true
toVisit--
// If toVisit starts out at 0 (indicating there is no
@@ -868,12 +881,15 @@ func getHavesFromRef(
}
return nil
})
+
+ return nil
}
func getHaves(
localRefs []*plumbing.Reference,
remoteRefStorer storer.ReferenceStorer,
s storage.Storer,
+ depth int,
) ([]plumbing.Hash, error) {
haves := map[plumbing.Hash]bool{}
@@ -894,7 +910,7 @@ func getHaves(
continue
}
- err = getHavesFromRef(ref, remoteRefs, s, haves)
+ err = getHavesFromRef(ref, remoteRefs, s, haves, depth)
if err != nil {
return nil, err
}
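The depth parameter threaded through getHaves/getHavesFromRef above caps how far the commit walk goes when collecting "have" hashes. A rough sketch of that capped traversal in isolation, assuming a starting *object.Commit and leaving out the seen-hash bookkeeping; collectHaves is a made-up name:

import (
	"github.com/go-git/go-git/v5/plumbing"
	"github.com/go-git/go-git/v5/plumbing/object"
	"github.com/go-git/go-git/v5/plumbing/storer"
)

// collectHaves walks the ancestors of start in preorder and records at most
// limit hashes, stopping early via storer.ErrStop; limit <= 0 means no cap,
// matching the "toVisit starts out at 0" convention in the code above.
func collectHaves(start *object.Commit, limit int) []plumbing.Hash {
	var haves []plumbing.Hash
	toVisit := limit
	iter := object.NewCommitPreorderIter(start, nil, nil)
	_ = iter.ForEach(func(c *object.Commit) error {
		haves = append(haves, c.Hash)
		toVisit--
		if toVisit == 0 {
			return storer.ErrStop
		}
		return nil
	})
	return haves
}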
@@ -914,42 +930,41 @@ func calculateRefs(
spec []config.RefSpec,
remoteRefs storer.ReferenceStorer,
tagMode TagMode,
-) (memory.ReferenceStorage, error) {
+) (memory.ReferenceStorage, [][]*plumbing.Reference, error) {
if tagMode == AllTags {
spec = append(spec, refspecAllTags)
}
refs := make(memory.ReferenceStorage)
- for _, s := range spec {
- if err := doCalculateRefs(s, remoteRefs, refs); err != nil {
- return nil, err
+ // list of references matched for each spec
+ specToRefs := make([][]*plumbing.Reference, len(spec))
+ for i := range spec {
+ var err error
+ specToRefs[i], err = doCalculateRefs(spec[i], remoteRefs, refs)
+ if err != nil {
+ return nil, nil, err
}
}
- return refs, nil
+ return refs, specToRefs, nil
}
func doCalculateRefs(
s config.RefSpec,
remoteRefs storer.ReferenceStorer,
refs memory.ReferenceStorage,
-) error {
- iter, err := remoteRefs.IterReferences()
- if err != nil {
- return err
- }
+) ([]*plumbing.Reference, error) {
+ var refList []*plumbing.Reference
if s.IsExactSHA1() {
ref := plumbing.NewHashReference(s.Dst(""), plumbing.NewHash(s.Src()))
- return refs.SetReference(ref)
+
+ refList = append(refList, ref)
+ return refList, refs.SetReference(ref)
}
var matched bool
- err = iter.ForEach(func(ref *plumbing.Reference) error {
- if !s.Match(ref.Name()) {
- return nil
- }
-
+ onMatched := func(ref *plumbing.Reference) error {
if ref.Type() == plumbing.SymbolicReference {
target, err := storer.ResolveReference(remoteRefs, ref.Name())
if err != nil {
@@ -964,28 +979,47 @@ func doCalculateRefs(
}
matched = true
- if err := refs.SetReference(ref); err != nil {
- return err
- }
+ refList = append(refList, ref)
+ return refs.SetReference(ref)
+ }
- if !s.IsWildcard() {
- return storer.ErrStop
+ var ret error
+ if s.IsWildcard() {
+ iter, err := remoteRefs.IterReferences()
+ if err != nil {
+ return nil, err
}
+ ret = iter.ForEach(func(ref *plumbing.Reference) error {
+ if !s.Match(ref.Name()) {
+ return nil
+ }
- return nil
- })
+ return onMatched(ref)
+ })
+ } else {
+ var resolvedRef *plumbing.Reference
+ src := s.Src()
+ resolvedRef, ret = expand_ref(remoteRefs, plumbing.ReferenceName(src))
+ if ret == nil {
+ ret = onMatched(resolvedRef)
+ }
+ }
if !matched && !s.IsWildcard() {
- return NoMatchingRefSpecError{refSpec: s}
+ return nil, NoMatchingRefSpecError{refSpec: s}
}
- return err
+ return refList, ret
}
-func getWants(localStorer storage.Storer, refs memory.ReferenceStorage) ([]plumbing.Hash, error) {
+func getWants(localStorer storage.Storer, refs memory.ReferenceStorage, depth int) ([]plumbing.Hash, error) {
+ // If depth is anything other than 1 and the repo has shallow commits, then having the commit
+ // at the reference does not mean we can skip fetching its parents
shallow := false
- if s, _ := localStorer.Shallow(); len(s) > 0 {
- shallow = true
+ if depth != 1 {
+ if s, _ := localStorer.Shallow(); len(s) > 0 {
+ shallow = true
+ }
}
wants := map[plumbing.Hash]bool{}
@@ -1139,27 +1173,28 @@ func buildSidebandIfSupported(l *capability.List, reader io.Reader, p sideband.P
func (r *Remote) updateLocalReferenceStorage(
specs []config.RefSpec,
fetchedRefs, remoteRefs memory.ReferenceStorage,
+ specToRefs [][]*plumbing.Reference,
tagMode TagMode,
force bool,
) (updated bool, err error) {
isWildcard := true
forceNeeded := false
- for _, spec := range specs {
+ for i, spec := range specs {
if !spec.IsWildcard() {
isWildcard = false
}
- for _, ref := range fetchedRefs {
- if !spec.Match(ref.Name()) && !spec.IsExactSHA1() {
- continue
- }
-
+ for _, ref := range specToRefs[i] {
if ref.Type() != plumbing.HashReference {
continue
}
localName := spec.Dst(ref.Name())
+ // If localName doesn't start with "refs/", treat it as a branch.
+ if !strings.HasPrefix(localName.String(), "refs/") {
+ localName = plumbing.NewBranchReferenceName(localName.String())
+ }
old, _ := storer.ResolveReference(r.s, localName)
new := plumbing.NewHashReference(localName, ref.Hash())
@@ -1245,21 +1280,29 @@ func (r *Remote) buildFetchedTags(refs memory.ReferenceStorage) (updated bool, e
// operation is complete, an error is returned. The context only affects to the
// transport operations.
func (r *Remote) ListContext(ctx context.Context, o *ListOptions) (rfs []*plumbing.Reference, err error) {
- refs, err := r.list(ctx, o)
- if err != nil {
- return refs, err
- }
- return refs, nil
+ return r.list(ctx, o)
}
func (r *Remote) List(o *ListOptions) (rfs []*plumbing.Reference, err error) {
- ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ timeout := o.Timeout
+ // Default to the old hardcoded 10s value if a timeout is not explicitly set.
+ if timeout == 0 {
+ timeout = 10
+ }
+ if timeout < 0 {
+ return nil, fmt.Errorf("invalid timeout: %d", timeout)
+ }
+ ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
defer cancel()
return r.ListContext(ctx, o)
}
func (r *Remote) list(ctx context.Context, o *ListOptions) (rfs []*plumbing.Reference, err error) {
- s, err := newUploadPackSession(r.c.URLs[0], o.Auth, o.InsecureSkipTLS, o.CABundle)
+ if r.c == nil || len(r.c.URLs) == 0 {
+ return nil, ErrEmptyUrls
+ }
+
+ s, err := newUploadPackSession(r.c.URLs[0], o.Auth, o.InsecureSkipTLS, o.CABundle, o.ProxyOptions)
if err != nil {
return nil, err
}
@@ -1282,13 +1325,22 @@ func (r *Remote) list(ctx context.Context, o *ListOptions) (rfs []*plumbing.Refe
}
var resultRefs []*plumbing.Reference
- err = refs.ForEach(func(ref *plumbing.Reference) error {
- resultRefs = append(resultRefs, ref)
- return nil
- })
- if err != nil {
- return nil, err
+ if o.PeelingOption == AppendPeeled || o.PeelingOption == IgnorePeeled {
+ err = refs.ForEach(func(ref *plumbing.Reference) error {
+ resultRefs = append(resultRefs, ref)
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
}
+
+ if o.PeelingOption == AppendPeeled || o.PeelingOption == OnlyPeeled {
+ for k, v := range ar.Peeled {
+ resultRefs = append(resultRefs, plumbing.NewReferenceFromStrings(k+"^{}", v.String()))
+ }
+ }
+
return resultRefs, nil
}
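The remote.go changes above add two user-facing knobs to ListOptions: a Timeout in seconds (0 keeps the previous 10s default, negative values are rejected) and a PeelingOption that controls whether peeled "^{}" entries are returned. A usage sketch; the fixture URL is only a placeholder:

package main

import (
	"fmt"
	"log"

	git "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/config"
	"github.com/go-git/go-git/v5/storage/memory"
)

func main() {
	rem := git.NewRemote(memory.NewStorage(), &config.RemoteConfig{
		Name: "origin",
		URLs: []string{"https://github.com/git-fixtures/tags.git"},
	})

	refs, err := rem.List(&git.ListOptions{
		Timeout:       30,               // seconds; 0 falls back to the 10s default
		PeelingOption: git.AppendPeeled, // also return the peeled "<tag>^{}" entries
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, ref := range refs {
		fmt.Println(ref)
	}
}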
diff --git a/remote_test.go b/remote_test.go
index d0c8fa8..ca5f261 100644
--- a/remote_test.go
+++ b/remote_test.go
@@ -5,16 +5,15 @@ import (
"context"
"errors"
"io"
- "io/ioutil"
"os"
"path/filepath"
"runtime"
+ "strings"
"time"
"github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/cache"
- "github.com/go-git/go-git/v5/plumbing/object"
"github.com/go-git/go-git/v5/plumbing/protocol/packp"
"github.com/go-git/go-git/v5/plumbing/protocol/packp/capability"
"github.com/go-git/go-git/v5/plumbing/storer"
@@ -141,6 +140,62 @@ func (s *RemoteSuite) TestFetch(c *C) {
})
}
+func (s *RemoteSuite) TestFetchToNewBranch(c *C) {
+ r := NewRemote(memory.NewStorage(), &config.RemoteConfig{
+ URLs: []string{s.GetLocalRepositoryURL(fixtures.ByTag("tags").One())},
+ })
+
+ s.testFetch(c, r, &FetchOptions{
+ RefSpecs: []config.RefSpec{
+ // qualified branch to unqualified branch
+ "refs/heads/master:foo",
+ // unqualified branch to unqualified branch
+ "+master:bar",
+ // unqualified tag to unqualified branch
+ config.RefSpec("tree-tag:tree-tag"),
+ // unqualified tag to qualified tag
+ config.RefSpec("+commit-tag:refs/tags/renamed-tag"),
+ },
+ }, []*plumbing.Reference{
+ plumbing.NewReferenceFromStrings("refs/heads/foo", "f7b877701fbf855b44c0a9e86f3fdce2c298b07f"),
+ plumbing.NewReferenceFromStrings("refs/heads/bar", "f7b877701fbf855b44c0a9e86f3fdce2c298b07f"),
+ plumbing.NewReferenceFromStrings("refs/heads/tree-tag", "152175bf7e5580299fa1f0ba41ef6474cc043b70"),
+ plumbing.NewReferenceFromStrings("refs/tags/tree-tag", "152175bf7e5580299fa1f0ba41ef6474cc043b70"),
+ plumbing.NewReferenceFromStrings("refs/tags/renamed-tag", "ad7897c0fb8e7d9a9ba41fa66072cf06095a6cfc"),
+ plumbing.NewReferenceFromStrings("refs/tags/commit-tag", "ad7897c0fb8e7d9a9ba41fa66072cf06095a6cfc"),
+ })
+}
+
+func (s *RemoteSuite) TestFetchToNewBranchWithAllTags(c *C) {
+ r := NewRemote(memory.NewStorage(), &config.RemoteConfig{
+ URLs: []string{s.GetLocalRepositoryURL(fixtures.ByTag("tags").One())},
+ })
+
+ s.testFetch(c, r, &FetchOptions{
+ Tags: AllTags,
+ RefSpecs: []config.RefSpec{
+ // qualified branch to unqualified branch
+ "+refs/heads/master:foo",
+ // unqualified branch to unqualified branch
+ "master:bar",
+ // unqualified tag to unqualified branch
+ config.RefSpec("+tree-tag:tree-tag"),
+ // unqualified tag to qualified tag
+ config.RefSpec("commit-tag:refs/tags/renamed-tag"),
+ },
+ }, []*plumbing.Reference{
+ plumbing.NewReferenceFromStrings("refs/heads/foo", "f7b877701fbf855b44c0a9e86f3fdce2c298b07f"),
+ plumbing.NewReferenceFromStrings("refs/heads/bar", "f7b877701fbf855b44c0a9e86f3fdce2c298b07f"),
+ plumbing.NewReferenceFromStrings("refs/heads/tree-tag", "152175bf7e5580299fa1f0ba41ef6474cc043b70"),
+ plumbing.NewReferenceFromStrings("refs/tags/tree-tag", "152175bf7e5580299fa1f0ba41ef6474cc043b70"),
+ plumbing.NewReferenceFromStrings("refs/tags/renamed-tag", "ad7897c0fb8e7d9a9ba41fa66072cf06095a6cfc"),
+ plumbing.NewReferenceFromStrings("refs/tags/commit-tag", "ad7897c0fb8e7d9a9ba41fa66072cf06095a6cfc"),
+ plumbing.NewReferenceFromStrings("refs/tags/annotated-tag", "b742a2a9fa0afcfa9a6fad080980fbc26b007c69"),
+ plumbing.NewReferenceFromStrings("refs/tags/blob-tag", "fe6cb94756faa81e5ed9240f9191b833db5f40ae"),
+ plumbing.NewReferenceFromStrings("refs/tags/lightweight-tag", "f7b877701fbf855b44c0a9e86f3fdce2c298b07f"),
+ })
+}
+
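The refspecs in the test above rely on the new behaviour that an unqualified destination is promoted to refs/heads/<name>. A small usage sketch along the same lines; fetchIntoLocalBranch and its URL argument are illustrative only:

import (
	git "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/config"
	"github.com/go-git/go-git/v5/storage/memory"
)

// fetchIntoLocalBranch fetches the remote's master into a local branch named
// "foo": the unqualified destination is now treated as refs/heads/foo.
func fetchIntoLocalBranch(url string) error {
	rem := git.NewRemote(memory.NewStorage(), &config.RemoteConfig{
		Name: "origin",
		URLs: []string{url},
	})
	err := rem.Fetch(&git.FetchOptions{
		RefSpecs: []config.RefSpec{"+refs/heads/master:foo"},
	})
	if err == git.NoErrAlreadyUpToDate {
		return nil
	}
	return err
}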
func (s *RemoteSuite) TestFetchNonExistantReference(c *C) {
r := NewRemote(memory.NewStorage(), &config.RemoteConfig{
URLs: []string{s.GetLocalRepositoryURL(fixtures.ByTag("tags").One())},
@@ -535,10 +590,22 @@ func (s *RemoteSuite) TestPushContext(c *C) {
})
c.Assert(err, IsNil)
- // let the goroutine from pushHashes finish and check that the number of
- // goroutines is the same as before
- time.Sleep(100 * time.Millisecond)
- c.Assert(runtime.NumGoroutine(), Equals, numGoroutines)
+ eventually(c, func() bool {
+ return runtime.NumGoroutine() <= numGoroutines
+ })
+}
+
+func eventually(c *C, condition func() bool) {
+ select {
+ case <-time.After(5 * time.Second):
+ default:
+ if condition() {
+ break
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+
+ c.Assert(condition(), Equals, true)
}
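The eventually helper added above performs at most one extra check before asserting. For comparison, a polling variant of the same idea (a sketch, not part of this change), reusing the gocheck types this file already imports:

import (
	"time"

	. "gopkg.in/check.v1"
)

// eventuallyPolling re-checks condition every 100ms until it holds or a
// 5 second deadline expires, then asserts on the final result.
func eventuallyPolling(c *C, condition func() bool) {
	deadline := time.Now().Add(5 * time.Second)
	for !condition() && time.Now().Before(deadline) {
		time.Sleep(100 * time.Millisecond)
	}
	c.Assert(condition(), Equals, true)
}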
func (s *RemoteSuite) TestPushContextCanceled(c *C) {
@@ -566,10 +633,9 @@ func (s *RemoteSuite) TestPushContextCanceled(c *C) {
})
c.Assert(err, Equals, context.Canceled)
- // let the goroutine from pushHashes finish and check that the number of
- // goroutines is the same as before
- time.Sleep(100 * time.Millisecond)
- c.Assert(runtime.NumGoroutine(), Equals, numGoroutines)
+ eventually(c, func() bool {
+ return runtime.NumGoroutine() <= numGoroutines
+ })
}
func (s *RemoteSuite) TestPushTags(c *C) {
@@ -854,6 +920,7 @@ func (s *RemoteSuite) TestPushForceWithLease_success(c *C) {
c.Assert(sto.SetReference(newCommit), IsNil)
ref, err := sto.Reference("refs/heads/branch")
+ c.Assert(err, IsNil)
c.Log(ref.String())
url := dstFs.Root()
@@ -1163,7 +1230,7 @@ func (s *RemoteSuite) TestGetHaves(c *C) {
),
}
- l, err := getHaves(localRefs, memory.NewStorage(), sto)
+ l, err := getHaves(localRefs, memory.NewStorage(), sto, 0)
c.Assert(err, IsNil)
c.Assert(l, HasLen, 2)
}
@@ -1199,6 +1266,41 @@ func (s *RemoteSuite) TestList(c *C) {
}
}
+func (s *RemoteSuite) TestListPeeling(c *C) {
+ remote := NewRemote(memory.NewStorage(), &config.RemoteConfig{
+ Name: DefaultRemoteName,
+ URLs: []string{"https://github.com/git-fixtures/tags.git"},
+ })
+
+ for _, tc := range []struct {
+ peelingOption PeelingOption
+ expectPeeled bool
+ expectNonPeeled bool
+ }{
+ {peelingOption: AppendPeeled, expectPeeled: true, expectNonPeeled: true},
+ {peelingOption: IgnorePeeled, expectPeeled: false, expectNonPeeled: true},
+ {peelingOption: OnlyPeeled, expectPeeled: true, expectNonPeeled: false},
+ } {
+ refs, err := remote.List(&ListOptions{
+ PeelingOption: tc.peelingOption,
+ })
+ c.Assert(err, IsNil)
+ c.Assert(len(refs) > 0, Equals, true)
+
+ foundPeeled, foundNonPeeled := false, false
+ for _, ref := range refs {
+ if strings.HasSuffix(ref.Name().String(), peeledSuffix) {
+ foundPeeled = true
+ } else {
+ foundNonPeeled = true
+ }
+ }
+
+ c.Assert(foundPeeled, Equals, tc.expectPeeled)
+ c.Assert(foundNonPeeled, Equals, tc.expectNonPeeled)
+ }
+}
+
func (s *RemoteSuite) TestListTimeout(c *C) {
remote := NewRemote(memory.NewStorage(), &config.RemoteConfig{
Name: DefaultRemoteName,
@@ -1339,7 +1441,7 @@ func (s *RemoteSuite) TestPushRequireRemoteRefs(c *C) {
}
func (s *RemoteSuite) TestCanPushShasToReference(c *C) {
- d, err := ioutil.TempDir("", "TestCanPushShasToReference")
+ d, err := os.MkdirTemp("", "TestCanPushShasToReference")
c.Assert(err, IsNil)
if err != nil {
return
@@ -1357,45 +1459,7 @@ func (s *RemoteSuite) TestCanPushShasToReference(c *C) {
c.Assert(err, IsNil)
c.Assert(repo, NotNil)
- fd, err := os.Create(filepath.Join(d, "repo", "README.md"))
- c.Assert(err, IsNil)
- if err != nil {
- return
- }
- _, err = fd.WriteString("# test repo")
- c.Assert(err, IsNil)
- if err != nil {
- return
- }
- err = fd.Close()
- c.Assert(err, IsNil)
- if err != nil {
- return
- }
-
- wt, err := repo.Worktree()
- c.Assert(err, IsNil)
- if err != nil {
- return
- }
-
- wt.Add("README.md")
- sha, err := wt.Commit("test commit", &CommitOptions{
- Author: &object.Signature{
- Name: "test",
- Email: "test@example.com",
- When: time.Now(),
- },
- Committer: &object.Signature{
- Name: "test",
- Email: "test@example.com",
- When: time.Now(),
- },
- })
- c.Assert(err, IsNil)
- if err != nil {
- return
- }
+ sha := CommitNewFile(c, repo, "README.md")
gitremote, err := repo.CreateRemote(&config.RemoteConfig{
Name: "local",
@@ -1425,3 +1489,69 @@ func (s *RemoteSuite) TestCanPushShasToReference(c *C) {
}
c.Assert(ref.Hash().String(), Equals, sha.String())
}
+
+func (s *RemoteSuite) TestFetchAfterShallowClone(c *C) {
+ tempDir, clean := s.TemporalDir()
+ defer clean()
+ remoteUrl := filepath.Join(tempDir, "remote")
+ repoDir := filepath.Join(tempDir, "repo")
+
+ // Create a new repo and add more than 1 commit (so we can have a shallow commit)
+ remote, err := PlainInit(remoteUrl, false)
+ c.Assert(err, IsNil)
+ c.Assert(remote, NotNil)
+
+ _ = CommitNewFile(c, remote, "File1")
+ _ = CommitNewFile(c, remote, "File2")
+
+ // Clone the repo with a depth of 1
+ repo, err := PlainClone(repoDir, false, &CloneOptions{
+ URL: remoteUrl,
+ Depth: 1,
+ Tags: NoTags,
+ SingleBranch: true,
+ ReferenceName: "master",
+ })
+ c.Assert(err, IsNil)
+
+ // Add new commits to the origin (more than 1 so that our next test hits a missing commit)
+ _ = CommitNewFile(c, remote, "File3")
+ sha4 := CommitNewFile(c, remote, "File4")
+
+ // Try fetch with depth of 2 (note, we need to ensure no remote branch remains pointing at the old commit)
+ r, err := repo.Remote(DefaultRemoteName)
+ c.Assert(err, IsNil)
+ s.testFetch(c, r, &FetchOptions{
+ Depth: 2,
+ Tags: NoTags,
+
+ RefSpecs: []config.RefSpec{
+ "+refs/heads/master:refs/heads/master",
+ "+refs/heads/master:refs/remotes/origin/master",
+ },
+ }, []*plumbing.Reference{
+ plumbing.NewReferenceFromStrings("refs/heads/master", sha4.String()),
+ plumbing.NewReferenceFromStrings("refs/remotes/origin/master", sha4.String()),
+ plumbing.NewSymbolicReference("HEAD", "refs/heads/master"),
+ })
+
+ // Add another commit to the origin
+ sha5 := CommitNewFile(c, remote, "File5")
+
+ // Try fetch with depth of 1 this time (to reach a commit that we don't have locally)
+ r, err = repo.Remote(DefaultRemoteName)
+ c.Assert(err, IsNil)
+ s.testFetch(c, r, &FetchOptions{
+ Depth: 1,
+ Tags: NoTags,
+
+ RefSpecs: []config.RefSpec{
+ "+refs/heads/master:refs/heads/master",
+ "+refs/heads/master:refs/remotes/origin/master",
+ },
+ }, []*plumbing.Reference{
+ plumbing.NewReferenceFromStrings("refs/heads/master", sha5.String()),
+ plumbing.NewReferenceFromStrings("refs/remotes/origin/master", sha5.String()),
+ plumbing.NewSymbolicReference("HEAD", "refs/heads/master"),
+ })
+}
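The test above is the user-visible scenario behind the depth plumbing: a depth-1 clone followed by later fetches with an explicit Depth. A condensed usage sketch; directory, URL and the chosen depths are placeholders:

import (
	git "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/config"
)

// shallowCloneThenDeepen clones url at depth 1 into dir and later fetches with
// a larger depth, which is the case the depth-aware getWants/getHaves handle.
func shallowCloneThenDeepen(dir, url string) error {
	repo, err := git.PlainClone(dir, false, &git.CloneOptions{
		URL:          url,
		Depth:        1,
		SingleBranch: true,
		Tags:         git.NoTags,
	})
	if err != nil {
		return err
	}
	err = repo.Fetch(&git.FetchOptions{
		Depth: 2,
		RefSpecs: []config.RefSpec{
			"+refs/heads/master:refs/remotes/origin/master",
		},
	})
	if err == git.NoErrAlreadyUpToDate {
		return nil
	}
	return err
}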
diff --git a/repository.go b/repository.go
index e8eb53f..3154ac0 100644
--- a/repository.go
+++ b/repository.go
@@ -3,32 +3,36 @@ package git
import (
"bytes"
"context"
+ "crypto"
"encoding/hex"
"errors"
"fmt"
- stdioutil "io/ioutil"
+ "io"
"os"
"path"
"path/filepath"
"strings"
"time"
+ "dario.cat/mergo"
"github.com/ProtonMail/go-crypto/openpgp"
"github.com/go-git/go-billy/v5"
"github.com/go-git/go-billy/v5/osfs"
"github.com/go-git/go-billy/v5/util"
"github.com/go-git/go-git/v5/config"
+ "github.com/go-git/go-git/v5/internal/path_util"
"github.com/go-git/go-git/v5/internal/revision"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/cache"
+ formatcfg "github.com/go-git/go-git/v5/plumbing/format/config"
"github.com/go-git/go-git/v5/plumbing/format/packfile"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/storage"
"github.com/go-git/go-git/v5/storage/filesystem"
"github.com/go-git/go-git/v5/storage/filesystem/dotgit"
"github.com/go-git/go-git/v5/utils/ioutil"
- "github.com/imdario/mergo"
)
// GitDirName this is a special folder where all the git stuff is.
@@ -57,6 +61,7 @@ var (
ErrIsBareRepository = errors.New("worktree not available in a bare repository")
ErrUnableToResolveCommit = errors.New("unable to resolve commit")
ErrPackedObjectsNotSupported = errors.New("packed objects not supported")
+ ErrSHA256NotSupported = errors.New("go-git was not compiled with SHA256 support")
)
// Repository represents a git repository
@@ -67,14 +72,30 @@ type Repository struct {
wt billy.Filesystem
}
+type InitOptions struct {
+ // The default branch (e.g. "refs/heads/master")
+ DefaultBranch plumbing.ReferenceName
+}
+
// Init creates an empty git repository, based on the given Storer and worktree.
// The worktree Filesystem is optional, if nil a bare repository is created. If
// the given storer is not empty ErrRepositoryAlreadyExists is returned
func Init(s storage.Storer, worktree billy.Filesystem) (*Repository, error) {
+ options := InitOptions{
+ DefaultBranch: plumbing.Master,
+ }
+ return InitWithOptions(s, worktree, options)
+}
+
+func InitWithOptions(s storage.Storer, worktree billy.Filesystem, options InitOptions) (*Repository, error) {
if err := initStorer(s); err != nil {
return nil, err
}
+ if options.DefaultBranch == "" {
+ options.DefaultBranch = plumbing.Master
+ }
+
r := newRepository(s, worktree)
_, err := r.Reference(plumbing.HEAD, false)
switch err {
@@ -85,7 +106,7 @@ func Init(s storage.Storer, worktree billy.Filesystem) (*Repository, error) {
return nil, err
}
- h := plumbing.NewSymbolicReference(plumbing.HEAD, plumbing.Master)
+ h := plumbing.NewSymbolicReference(plumbing.HEAD, options.DefaultBranch)
if err := s.SetReference(h); err != nil {
return nil, err
}
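InitWithOptions, introduced above, lets callers pick the initial HEAD target instead of the hard-coded master. A usage sketch creating an in-memory repository whose default branch is main (the branch name is only an example):

import (
	"github.com/go-git/go-billy/v5/memfs"
	git "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing"
	"github.com/go-git/go-git/v5/storage/memory"
)

func initWithMain() (*git.Repository, error) {
	// HEAD becomes a symbolic reference to refs/heads/main instead of master.
	return git.InitWithOptions(memory.NewStorage(), memfs.New(), git.InitOptions{
		DefaultBranch: plumbing.NewBranchReferenceName("main"),
	})
}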
@@ -228,6 +249,39 @@ func PlainInit(path string, isBare bool) (*Repository, error) {
return Init(s, wt)
}
+func PlainInitWithOptions(path string, opts *PlainInitOptions) (*Repository, error) {
+ wt := osfs.New(path)
+ dot, _ := wt.Chroot(GitDirName)
+
+ s := filesystem.NewStorage(dot, cache.NewObjectLRUDefault())
+
+ r, err := Init(s, wt)
+ if err != nil {
+ return nil, err
+ }
+
+ cfg, err := r.Config()
+ if err != nil {
+ return nil, err
+ }
+
+ if opts != nil {
+ if opts.ObjectFormat == formatcfg.SHA256 && hash.CryptoType != crypto.SHA256 {
+ return nil, ErrSHA256NotSupported
+ }
+
+ cfg.Core.RepositoryFormatVersion = formatcfg.Version_1
+ cfg.Extensions.ObjectFormat = opts.ObjectFormat
+ }
+
+ err = r.Storer.SetConfig(cfg)
+ if err != nil {
+ return nil, err
+ }
+
+ return r, err
+}
+
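PlainInitWithOptions wires the new SHA256 object format into repository creation. A hedged sketch of using it, assuming go-git was built with SHA256 support (otherwise ErrSHA256NotSupported is returned) and that PlainInitOptions exposes the ObjectFormat field consumed above:

import (
	git "github.com/go-git/go-git/v5"
	formatcfg "github.com/go-git/go-git/v5/plumbing/format/config"
)

func initSHA256(path string) (*git.Repository, error) {
	// Writes core.repositoryformatversion=1 and extensions.objectformat=sha256.
	return git.PlainInitWithOptions(path, &git.PlainInitOptions{
		ObjectFormat: formatcfg.SHA256,
	})
}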
// PlainOpen opens a git repository from the given path. It detects if the
// repository is bare or a normal one. If the path doesn't contain a valid
// repository ErrRepositoryNotExists is returned
@@ -269,6 +323,11 @@ func PlainOpenWithOptions(path string, o *PlainOpenOptions) (*Repository, error)
}
func dotGitToOSFilesystems(path string, detect bool) (dot, wt billy.Filesystem, err error) {
+ path, err = path_util.ReplaceTildeWithHome(path)
+ if err != nil {
+ return nil, nil, err
+ }
+
if path, err = filepath.Abs(path); err != nil {
return nil, nil, err
}
@@ -280,6 +339,9 @@ func dotGitToOSFilesystems(path string, detect bool) (dot, wt billy.Filesystem,
pathinfo, err := fs.Stat("/")
if !os.IsNotExist(err) {
+ if pathinfo == nil {
+ return nil, nil, err
+ }
if !pathinfo.IsDir() && detect {
fs = osfs.New(filepath.Dir(path))
}
@@ -327,7 +389,7 @@ func dotGitFileToOSFilesystem(path string, fs billy.Filesystem) (bfs billy.Files
}
defer ioutil.CheckClose(f, &err)
- b, err := stdioutil.ReadAll(f)
+ b, err := io.ReadAll(f)
if err != nil {
return nil, err
}
@@ -356,7 +418,7 @@ func dotGitCommonDirectory(fs billy.Filesystem) (commonDir billy.Filesystem, err
return nil, err
}
- b, err := stdioutil.ReadAll(f)
+ b, err := io.ReadAll(f)
if err != nil {
return nil, err
}
@@ -404,6 +466,9 @@ func PlainCloneContext(ctx context.Context, path string, isBare bool, o *CloneOp
return nil, err
}
+ if o.Mirror {
+ isBare = true
+ }
r, err := PlainInit(path, isBare)
if err != nil {
return nil, err
@@ -747,21 +812,20 @@ func (r *Repository) buildTagSignature(tag *object.Tag, signKey *openpgp.Entity)
// If you want to check to see if the tag is an annotated tag, you can call
// TagObject on the hash of the reference in ForEach:
//
-// ref, err := r.Tag("v0.1.0")
-// if err != nil {
-// // Handle error
-// }
-//
-// obj, err := r.TagObject(ref.Hash())
-// switch err {
-// case nil:
-// // Tag object present
-// case plumbing.ErrObjectNotFound:
-// // Not a tag object
-// default:
-// // Some other error
-// }
+// ref, err := r.Tag("v0.1.0")
+// if err != nil {
+// // Handle error
+// }
//
+// obj, err := r.TagObject(ref.Hash())
+// switch err {
+// case nil:
+// // Tag object present
+// case plumbing.ErrObjectNotFound:
+// // Not a tag object
+// default:
+// // Some other error
+// }
func (r *Repository) Tag(name string) (*plumbing.Reference, error) {
ref, err := r.Reference(plumbing.ReferenceName(path.Join("refs", "tags", name)), false)
if err != nil {
@@ -812,9 +876,10 @@ func (r *Repository) clone(ctx context.Context, o *CloneOptions) error {
}
c := &config.RemoteConfig{
- Name: o.RemoteName,
- URLs: []string{o.URL},
- Fetch: r.cloneRefSpec(o),
+ Name: o.RemoteName,
+ URLs: []string{o.URL},
+ Fetch: r.cloneRefSpec(o),
+ Mirror: o.Mirror,
}
if _, err := r.CreateRemote(c); err != nil {
@@ -830,6 +895,7 @@ func (r *Repository) clone(ctx context.Context, o *CloneOptions) error {
RemoteName: o.RemoteName,
InsecureSkipTLS: o.InsecureSkipTLS,
CABundle: o.CABundle,
+ ProxyOptions: o.ProxyOptions,
}, o.ReferenceName)
if err != nil {
return err
@@ -856,7 +922,13 @@ func (r *Repository) clone(ctx context.Context, o *CloneOptions) error {
if o.RecurseSubmodules != NoRecurseSubmodules {
if err := w.updateSubmodules(&SubmoduleUpdateOptions{
RecurseSubmodules: o.RecurseSubmodules,
- Auth: o.Auth,
+ Depth: func() int {
+ if o.ShallowSubmodules {
+ return 1
+ }
+ return 0
+ }(),
+ Auth: o.Auth,
}); err != nil {
return err
}
@@ -867,7 +939,7 @@ func (r *Repository) clone(ctx context.Context, o *CloneOptions) error {
return err
}
- if ref.Name().IsBranch() {
+ if !o.Mirror && ref.Name().IsBranch() {
branchRef := ref.Name()
branchName := strings.Split(string(branchRef), "refs/heads/")[1]
@@ -898,6 +970,8 @@ const (
func (r *Repository) cloneRefSpec(o *CloneOptions) []config.RefSpec {
switch {
+ case o.Mirror:
+ return []config.RefSpec{"+refs/*:refs/*"}
case o.ReferenceName.IsTag():
return []config.RefSpec{
config.RefSpec(fmt.Sprintf(refspecTag, o.ReferenceName.Short())),
@@ -905,7 +979,6 @@ func (r *Repository) cloneRefSpec(o *CloneOptions) []config.RefSpec {
case o.SingleBranch && o.ReferenceName == plumbing.HEAD:
return []config.RefSpec{
config.RefSpec(fmt.Sprintf(refspecSingleBranchHEAD, o.RemoteName)),
- config.RefSpec(fmt.Sprintf(refspecSingleBranch, plumbing.Master.Short(), o.RemoteName)),
}
case o.SingleBranch:
return []config.RefSpec{
@@ -967,7 +1040,7 @@ func (r *Repository) fetchAndUpdateReferences(
return nil, err
}
- resolvedRef, err := storer.ResolveReference(remoteRefs, ref)
+ resolvedRef, err := expand_ref(remoteRefs, ref)
if err != nil {
return nil, err
}
@@ -1238,26 +1311,25 @@ func commitIterFunc(order LogOrder) func(c *object.Commit) object.CommitIter {
// If you want to check to see if the tag is an annotated tag, you can call
// TagObject on the hash Reference passed in through ForEach:
//
-// iter, err := r.Tags()
-// if err != nil {
-// // Handle error
-// }
-//
-// if err := iter.ForEach(func (ref *plumbing.Reference) error {
-// obj, err := r.TagObject(ref.Hash())
-// switch err {
-// case nil:
-// // Tag object present
-// case plumbing.ErrObjectNotFound:
-// // Not a tag object
-// default:
-// // Some other error
-// return err
-// }
-// }); err != nil {
-// // Handle outer iterator error
-// }
+// iter, err := r.Tags()
+// if err != nil {
+// // Handle error
+// }
//
+// if err := iter.ForEach(func (ref *plumbing.Reference) error {
+// obj, err := r.TagObject(ref.Hash())
+// switch err {
+// case nil:
+// // Tag object present
+// case plumbing.ErrObjectNotFound:
+// // Not a tag object
+// default:
+// // Some other error
+// return err
+// }
+// }); err != nil {
+// // Handle outer iterator error
+// }
func (r *Repository) Tags() (storer.ReferenceIter, error) {
refIter, err := r.Storer.IterReferences()
if err != nil {
@@ -1416,14 +1488,35 @@ func (r *Repository) Worktree() (*Worktree, error) {
return &Worktree{r: r, Filesystem: r.wt}, nil
}
+func expand_ref(s storer.ReferenceStorer, ref plumbing.ReferenceName) (*plumbing.Reference, error) {
+ // To improve troubleshooting, this preserves the error for the provided `ref`,
+ // and returns that specific error in case all parse rules fail.
+ var ret error
+ for _, rule := range plumbing.RefRevParseRules {
+ resolvedRef, err := storer.ResolveReference(s, plumbing.ReferenceName(fmt.Sprintf(rule, ref)))
+
+ if err == nil {
+ return resolvedRef, nil
+ } else if ret == nil {
+ ret = err
+ }
+ }
+
+ return nil, ret
+}
+
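expand_ref tries each rev-parse rule in order and returns the first reference that resolves, preserving the first error otherwise. A tiny sketch showing which candidate names a short ref is expanded to; printCandidates is an invented helper:

import (
	"fmt"

	"github.com/go-git/go-git/v5/plumbing"
)

// printCandidates lists the reference names a short name such as "master" or
// "v1.0.0" is matched against, in the order expand_ref tries them.
func printCandidates(short string) {
	for _, rule := range plumbing.RefRevParseRules {
		fmt.Printf(rule+"\n", short)
	}
}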
// ResolveRevision resolves revision to corresponding hash. It will always
// resolve to a commit hash, not a tree or annotated tag.
//
// Implemented resolvers : HEAD, branch, tag, heads/branch, refs/heads/branch,
// refs/tags/tag, refs/remotes/origin/branch, refs/remotes/origin/HEAD, tilde and caret (HEAD~1, master~^, tag~2, ref/heads/master~1, ...), selection by text (HEAD^{/fix nasty bug}), hash (prefix and full)
-func (r *Repository) ResolveRevision(rev plumbing.Revision) (*plumbing.Hash, error) {
- p := revision.NewParserFromString(string(rev))
+func (r *Repository) ResolveRevision(in plumbing.Revision) (*plumbing.Hash, error) {
+ rev := in.String()
+ if rev == "" {
+ return &plumbing.ZeroHash, plumbing.ErrReferenceNotFound
+ }
+ p := revision.NewParserFromString(rev)
items, err := p.Parse()
if err != nil {
@@ -1441,13 +1534,9 @@ func (r *Repository) ResolveRevision(rev plumbing.Revision) (*plumbing.Hash, err
tryHashes = append(tryHashes, r.resolveHashPrefix(string(revisionRef))...)
- for _, rule := range append([]string{"%s"}, plumbing.RefRevParseRules...) {
- ref, err := storer.ResolveReference(r.Storer, plumbing.ReferenceName(fmt.Sprintf(rule, revisionRef)))
-
- if err == nil {
- tryHashes = append(tryHashes, ref.Hash())
- break
- }
+ ref, err := expand_ref(r.Storer, plumbing.ReferenceName(revisionRef))
+ if err == nil {
+ tryHashes = append(tryHashes, ref.Hash())
}
// in ambiguous cases, `git rev-parse` will emit a warning, but
@@ -1554,6 +1643,10 @@ func (r *Repository) ResolveRevision(rev plumbing.Revision) (*plumbing.Hash, err
}
}
+ if commit == nil {
+ return &plumbing.ZeroHash, plumbing.ErrReferenceNotFound
+ }
+
return &commit.Hash, nil
}
diff --git a/repository_test.go b/repository_test.go
index e284df8..9e000a3 100644
--- a/repository_test.go
+++ b/repository_test.go
@@ -6,9 +6,9 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"os"
"os/exec"
+ "os/user"
"path/filepath"
"regexp"
"strings"
@@ -52,6 +52,54 @@ func (s *RepositorySuite) TestInit(c *C) {
cfg, err := r.Config()
c.Assert(err, IsNil)
c.Assert(cfg.Core.IsBare, Equals, false)
+
+ // check the HEAD to see what the default branch is
+ createCommit(c, r)
+ ref, err := r.Head()
+ c.Assert(err, IsNil)
+ c.Assert(ref.Name().String(), Equals, plumbing.Master.String())
+}
+
+func (s *RepositorySuite) TestInitWithOptions(c *C) {
+ r, err := InitWithOptions(memory.NewStorage(), memfs.New(), InitOptions{
+ DefaultBranch: "refs/heads/foo",
+ })
+ c.Assert(err, IsNil)
+ c.Assert(r, NotNil)
+ createCommit(c, r)
+
+ ref, err := r.Head()
+ c.Assert(err, IsNil)
+ c.Assert(ref.Name().String(), Equals, "refs/heads/foo")
+
+}
+
+func createCommit(c *C, r *Repository) {
+ // Create a commit so there is a HEAD to check
+ wt, err := r.Worktree()
+ c.Assert(err, IsNil)
+
+ rm, err := wt.Filesystem.Create("foo.txt")
+ c.Assert(err, IsNil)
+
+ _, err = rm.Write([]byte("foo text"))
+ c.Assert(err, IsNil)
+
+ _, err = wt.Add("foo.txt")
+ c.Assert(err, IsNil)
+
+ author := object.Signature{
+ Name: "go-git",
+ Email: "go-git@fake.local",
+ When: time.Now(),
+ }
+ _, err = wt.Commit("test commit message", &CommitOptions{
+ All: true,
+ Author: &author,
+ Committer: &author,
+ })
+ c.Assert(err, IsNil)
+
}
func (s *RepositorySuite) TestInitNonStandardDotGit(c *C) {
@@ -60,17 +108,18 @@ func (s *RepositorySuite) TestInitNonStandardDotGit(c *C) {
fs := osfs.New(dir)
dot, _ := fs.Chroot("storage")
- storage := filesystem.NewStorage(dot, cache.NewObjectLRUDefault())
+ st := filesystem.NewStorage(dot, cache.NewObjectLRUDefault())
wt, _ := fs.Chroot("worktree")
- r, err := Init(storage, wt)
+ r, err := Init(st, wt)
c.Assert(err, IsNil)
c.Assert(r, NotNil)
f, err := fs.Open(fs.Join("worktree", ".git"))
c.Assert(err, IsNil)
+ defer func() { _ = f.Close() }()
- all, err := ioutil.ReadAll(f)
+ all, err := io.ReadAll(f)
c.Assert(err, IsNil)
c.Assert(string(all), Equals, fmt.Sprintf("gitdir: %s\n", filepath.Join("..", "storage")))
@@ -85,9 +134,9 @@ func (s *RepositorySuite) TestInitStandardDotGit(c *C) {
fs := osfs.New(dir)
dot, _ := fs.Chroot(".git")
- storage := filesystem.NewStorage(dot, cache.NewObjectLRUDefault())
+ st := filesystem.NewStorage(dot, cache.NewObjectLRUDefault())
- r, err := Init(storage, fs)
+ r, err := Init(st, fs)
c.Assert(err, IsNil)
c.Assert(r, NotNil)
@@ -189,6 +238,35 @@ func (s *RepositorySuite) TestCloneContext(c *C) {
c.Assert(err, Equals, context.Canceled)
}
+func (s *RepositorySuite) TestCloneMirror(c *C) {
+ r, err := Clone(memory.NewStorage(), nil, &CloneOptions{
+ URL: fixtures.Basic().One().URL,
+ Mirror: true,
+ })
+
+ c.Assert(err, IsNil)
+
+ refs, err := r.References()
+ var count int
+ refs.ForEach(func(r *plumbing.Reference) error { c.Log(r); count++; return nil })
+ c.Assert(err, IsNil)
+ // 6 refs total from github.com/git-fixtures/basic.git:
+ // - HEAD
+ // - refs/heads/master
+ // - refs/heads/branch
+ // - refs/pull/1/head
+ // - refs/pull/2/head
+ // - refs/pull/2/merge
+ c.Assert(count, Equals, 6)
+
+ cfg, err := r.Config()
+ c.Assert(err, IsNil)
+
+ c.Assert(cfg.Core.IsBare, Equals, true)
+ c.Assert(cfg.Remotes[DefaultRemoteName].Validate(), IsNil)
+ c.Assert(cfg.Remotes[DefaultRemoteName].Mirror, Equals, true)
+}
+
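The Mirror clone option exercised above forces a bare repository and the +refs/*:refs/* refspec. A usage sketch; the URL parameter is a placeholder:

import (
	git "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/storage/memory"
)

func mirrorClone(url string) (*git.Repository, error) {
	// Mirror implies a bare clone and copies every remote ref verbatim.
	return git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
		URL:    url,
		Mirror: true,
	})
}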
func (s *RepositorySuite) TestCloneWithTags(c *C) {
url := s.GetLocalRepositoryURL(
fixtures.ByURL("https://github.com/git-fixtures/tags.git").One(),
@@ -466,6 +544,29 @@ func (s *RepositorySuite) TestPlainOpen(c *C) {
c.Assert(r, NotNil)
}
+func (s *RepositorySuite) TestPlainOpenTildePath(c *C) {
+ dir, clean := s.TemporalHomeDir()
+ defer clean()
+
+ r, err := PlainInit(dir, false)
+ c.Assert(err, IsNil)
+ c.Assert(r, NotNil)
+
+ currentUser, err := user.Current()
+ c.Assert(err, IsNil)
+ // remove domain for windows
+ username := currentUser.Username[strings.Index(currentUser.Username, "\\")+1:]
+
+ homes := []string{"~/", "~" + username + "/"}
+ for _, home := range homes {
+ path := strings.Replace(dir, strings.Split(dir, ".tmp")[0], home, 1)
+
+ r, err = PlainOpen(path)
+ c.Assert(err, IsNil)
+ c.Assert(r, NotNil)
+ }
+}
+
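With the tilde expansion added to dotGitToOSFilesystems, PlainOpen accepts home-relative paths as tested above. A minimal sketch; the path is a placeholder:

import git "github.com/go-git/go-git/v5"

func openFromHome() (*git.Repository, error) {
	// "~/" and "~<user>/" prefixes are expanded to the home directory first.
	return git.PlainOpen("~/src/example-repo")
}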
func (s *RepositorySuite) TestPlainOpenBare(c *C) {
dir, clean := s.TemporalDir()
defer clean()
@@ -855,6 +956,43 @@ func (s *RepositorySuite) TestPlainCloneWithRecurseSubmodules(c *C) {
c.Assert(cfg.Submodules, HasLen, 2)
}
+func (s *RepositorySuite) TestPlainCloneWithShallowSubmodules(c *C) {
+ if testing.Short() {
+ c.Skip("skipping test in short mode.")
+ }
+
+ dir, clean := s.TemporalDir()
+ defer clean()
+
+ path := fixtures.ByTag("submodule").One().Worktree().Root()
+ mainRepo, err := PlainClone(dir, false, &CloneOptions{
+ URL: path,
+ RecurseSubmodules: 1,
+ ShallowSubmodules: true,
+ })
+ c.Assert(err, IsNil)
+
+ mainWorktree, err := mainRepo.Worktree()
+ c.Assert(err, IsNil)
+
+ submodule, err := mainWorktree.Submodule("basic")
+ c.Assert(err, IsNil)
+
+ subRepo, err := submodule.Repository()
+ c.Assert(err, IsNil)
+
+ lr, err := subRepo.Log(&LogOptions{})
+ c.Assert(err, IsNil)
+
+ commitCount := 0
+ for _, err := lr.Next(); err == nil; _, err = lr.Next() {
+ commitCount++
+ }
+ c.Assert(err, IsNil)
+
+ c.Assert(commitCount, Equals, 1)
+}
+
func (s *RepositorySuite) TestPlainCloneNoCheckout(c *C) {
dir, clean := s.TemporalDir()
defer clean()
@@ -991,6 +1129,14 @@ func (s *RepositorySuite) TestCloneConfig(c *C) {
}
func (s *RepositorySuite) TestCloneSingleBranchAndNonHEAD(c *C) {
+ s.testCloneSingleBranchAndNonHEADReference(c, "refs/heads/branch")
+}
+
+func (s *RepositorySuite) TestCloneSingleBranchAndNonHEADAndNonFull(c *C) {
+ s.testCloneSingleBranchAndNonHEADReference(c, "branch")
+}
+
+func (s *RepositorySuite) testCloneSingleBranchAndNonHEADReference(c *C, ref string) {
r, _ := Init(memory.NewStorage(), nil)
head, err := r.Head()
@@ -999,7 +1145,7 @@ func (s *RepositorySuite) TestCloneSingleBranchAndNonHEAD(c *C) {
err = r.clone(context.Background(), &CloneOptions{
URL: s.GetBasicLocalRepositoryURL(),
- ReferenceName: plumbing.ReferenceName("refs/heads/branch"),
+ ReferenceName: plumbing.ReferenceName(ref),
SingleBranch: true,
})
@@ -1034,6 +1180,49 @@ func (s *RepositorySuite) TestCloneSingleBranchAndNonHEAD(c *C) {
c.Assert(branch.Hash().String(), Equals, "e8d3ffab552895c19b9fcf7aa264d277cde33881")
}
+func (s *RepositorySuite) TestCloneSingleBranchHEADMain(c *C) {
+ r, _ := Init(memory.NewStorage(), nil)
+
+ head, err := r.Head()
+ c.Assert(err, Equals, plumbing.ErrReferenceNotFound)
+ c.Assert(head, IsNil)
+
+ err = r.clone(context.Background(), &CloneOptions{
+ URL: s.GetLocalRepositoryURL(fixtures.ByTag("no-master-head").One()),
+ SingleBranch: true,
+ })
+
+ c.Assert(err, IsNil)
+
+ remotes, err := r.Remotes()
+ c.Assert(err, IsNil)
+ c.Assert(remotes, HasLen, 1)
+
+ cfg, err := r.Config()
+ c.Assert(err, IsNil)
+ c.Assert(cfg.Branches, HasLen, 1)
+ c.Assert(cfg.Branches["main"].Name, Equals, "main")
+ c.Assert(cfg.Branches["main"].Remote, Equals, "origin")
+ c.Assert(cfg.Branches["main"].Merge, Equals, plumbing.ReferenceName("refs/heads/main"))
+
+ head, err = r.Reference(plumbing.HEAD, false)
+ c.Assert(err, IsNil)
+ c.Assert(head, NotNil)
+ c.Assert(head.Type(), Equals, plumbing.SymbolicReference)
+ c.Assert(head.Target().String(), Equals, "refs/heads/main")
+
+ branch, err := r.Reference(head.Target(), false)
+ c.Assert(err, IsNil)
+ c.Assert(branch, NotNil)
+ c.Assert(branch.Hash().String(), Equals, "786dafbd351e587da1ae97e5fb9fbdf868b4a28f")
+
+ branch, err = r.Reference("refs/remotes/origin/HEAD", false)
+ c.Assert(err, IsNil)
+ c.Assert(branch, NotNil)
+ c.Assert(branch.Type(), Equals, plumbing.HashReference)
+ c.Assert(branch.Hash().String(), Equals, "786dafbd351e587da1ae97e5fb9fbdf868b4a28f")
+}
+
func (s *RepositorySuite) TestCloneSingleBranch(c *C) {
r, _ := Init(memory.NewStorage(), nil)
@@ -1069,12 +1258,6 @@ func (s *RepositorySuite) TestCloneSingleBranch(c *C) {
c.Assert(err, IsNil)
c.Assert(branch, NotNil)
c.Assert(branch.Hash().String(), Equals, "6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
-
- branch, err = r.Reference("refs/remotes/origin/master", false)
- c.Assert(err, IsNil)
- c.Assert(branch, NotNil)
- c.Assert(branch.Type(), Equals, plumbing.HashReference)
- c.Assert(branch.Hash().String(), Equals, "6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
}
func (s *RepositorySuite) TestCloneSingleTag(c *C) {
@@ -2948,6 +3131,20 @@ func (s *RepositorySuite) TestBrokenMultipleShallowFetch(c *C) {
c.Assert(err, IsNil)
}
+func (s *RepositorySuite) TestDotGitToOSFilesystemsInvalidPath(c *C) {
+ _, _, err := dotGitToOSFilesystems("\000", false)
+ c.Assert(err, NotNil)
+}
+
+func (s *RepositorySuite) TestIssue674(c *C) {
+ r, _ := Init(memory.NewStorage(), nil)
+ h, err := r.ResolveRevision(plumbing.Revision(""))
+
+ c.Assert(err, NotNil)
+ c.Assert(h, NotNil)
+ c.Check(h.IsZero(), Equals, true)
+}
+
func BenchmarkObjects(b *testing.B) {
defer fixtures.Clean()
@@ -2958,14 +3155,14 @@ func BenchmarkObjects(b *testing.B) {
b.Run(f.URL, func(b *testing.B) {
fs := f.DotGit()
- storer := filesystem.NewStorage(fs, cache.NewObjectLRUDefault())
+ st := filesystem.NewStorage(fs, cache.NewObjectLRUDefault())
worktree, err := fs.Chroot(filepath.Dir(fs.Root()))
if err != nil {
b.Fatal(err)
}
- repo, err := Open(storer, worktree)
+ repo, err := Open(st, worktree)
if err != nil {
b.Fatal(err)
}
@@ -2995,7 +3192,7 @@ func BenchmarkObjects(b *testing.B) {
func BenchmarkPlainClone(b *testing.B) {
for i := 0; i < b.N; i++ {
- t, err := ioutil.TempDir("", "")
+ t, err := os.MkdirTemp("", "")
if err != nil {
b.Fatal(err)
}
diff --git a/storage/filesystem/dotgit/dotgit.go b/storage/filesystem/dotgit/dotgit.go
index 6c386f7..e02e6dd 100644
--- a/storage/filesystem/dotgit/dotgit.go
+++ b/storage/filesystem/dotgit/dotgit.go
@@ -7,7 +7,6 @@ import (
"errors"
"fmt"
"io"
- stdioutil "io/ioutil"
"os"
"path/filepath"
"sort"
@@ -16,6 +15,7 @@ import (
"github.com/go-git/go-billy/v5/osfs"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-git/v5/storage"
"github.com/go-git/go-git/v5/utils/ioutil"
@@ -552,8 +552,8 @@ func (d *DotGit) hasPack(h plumbing.Hash) error {
}
func (d *DotGit) objectPath(h plumbing.Hash) string {
- hash := h.String()
- return d.fs.Join(objectsPath, hash[0:2], hash[2:40])
+ hex := h.String()
+ return d.fs.Join(objectsPath, hex[0:2], hex[2:hash.HexSize])
}
// incomingObjectPath is intended to add support for a git pre-receive hook
@@ -563,15 +563,16 @@ func (d *DotGit) objectPath(h plumbing.Hash) string {
//
// More on git hooks found here : https://git-scm.com/docs/githooks
// More on 'quarantine'/incoming directory here:
-// https://git-scm.com/docs/git-receive-pack
+//
+// https://git-scm.com/docs/git-receive-pack
func (d *DotGit) incomingObjectPath(h plumbing.Hash) string {
hString := h.String()
if d.incomingDirName == "" {
- return d.fs.Join(objectsPath, hString[0:2], hString[2:40])
+ return d.fs.Join(objectsPath, hString[0:2], hString[2:hash.HexSize])
}
- return d.fs.Join(objectsPath, d.incomingDirName, hString[0:2], hString[2:40])
+ return d.fs.Join(objectsPath, d.incomingDirName, hString[0:2], hString[2:hash.HexSize])
}
// hasIncomingObjects searches for an incoming directory and keeps its name
@@ -581,7 +582,9 @@ func (d *DotGit) hasIncomingObjects() bool {
directoryContents, err := d.fs.ReadDir(objectsPath)
if err == nil {
for _, file := range directoryContents {
- if strings.HasPrefix(file.Name(), "incoming-") && file.IsDir() {
+ if file.IsDir() && (strings.HasPrefix(file.Name(), "tmp_objdir-incoming-") ||
+ // Before Git 2.35 the incoming objects directory had a different prefix
+ strings.HasPrefix(file.Name(), "incoming-")) {
d.incomingDirName = file.Name()
}
}
@@ -645,7 +648,7 @@ func (d *DotGit) ObjectDelete(h plumbing.Hash) error {
}
func (d *DotGit) readReferenceFrom(rd io.Reader, name string) (ref *plumbing.Reference, err error) {
- b, err := stdioutil.ReadAll(rd)
+ b, err := io.ReadAll(rd)
if err != nil {
return nil, err
}
@@ -716,48 +719,56 @@ func (d *DotGit) Ref(name plumbing.ReferenceName) (*plumbing.Reference, error) {
return d.packedRef(name)
}
-func (d *DotGit) findPackedRefsInFile(f billy.File) ([]*plumbing.Reference, error) {
+func (d *DotGit) findPackedRefsInFile(f billy.File, recv refsRecv) error {
s := bufio.NewScanner(f)
- var refs []*plumbing.Reference
for s.Scan() {
ref, err := d.processLine(s.Text())
if err != nil {
- return nil, err
+ return err
}
- if ref != nil {
- refs = append(refs, ref)
+ if !recv(ref) {
+ // skip parse
+ return nil
}
}
-
- return refs, s.Err()
+ if err := s.Err(); err != nil {
+ return err
+ }
+ return nil
}
-func (d *DotGit) findPackedRefs() (r []*plumbing.Reference, err error) {
+// refsRecv is called for each parsed reference; returning true continues the scan, while returning false stops it early, which speeds up the lookup of a single reference.
+type refsRecv func(*plumbing.Reference) bool
+
+func (d *DotGit) findPackedRefs(recv refsRecv) error {
f, err := d.fs.Open(packedRefsPath)
if err != nil {
if os.IsNotExist(err) {
- return nil, nil
+ return nil
}
- return nil, err
+ return err
}
defer ioutil.CheckClose(f, &err)
- return d.findPackedRefsInFile(f)
+ return d.findPackedRefsInFile(f, recv)
}
func (d *DotGit) packedRef(name plumbing.ReferenceName) (*plumbing.Reference, error) {
- refs, err := d.findPackedRefs()
- if err != nil {
+ var ref *plumbing.Reference
+ if err := d.findPackedRefs(func(r *plumbing.Reference) bool {
+ if r != nil && r.Name() == name {
+ ref = r
+ // ref found
+ return false
+ }
+ return true
+ }); err != nil {
return nil, err
}
-
- for _, ref := range refs {
- if ref.Name() == name {
- return ref, nil
- }
+ if ref != nil {
+ return ref, nil
}
-
return nil, plumbing.ErrReferenceNotFound
}
@@ -777,34 +788,22 @@ func (d *DotGit) RemoveRef(name plumbing.ReferenceName) error {
return d.rewritePackedRefsWithoutRef(name)
}
-func (d *DotGit) addRefsFromPackedRefs(refs *[]*plumbing.Reference, seen map[plumbing.ReferenceName]bool) (err error) {
- packedRefs, err := d.findPackedRefs()
- if err != nil {
- return err
- }
-
- for _, ref := range packedRefs {
- if !seen[ref.Name()] {
- *refs = append(*refs, ref)
- seen[ref.Name()] = true
+func refsRecvFunc(refs *[]*plumbing.Reference, seen map[plumbing.ReferenceName]bool) refsRecv {
+ return func(r *plumbing.Reference) bool {
+ if r != nil && !seen[r.Name()] {
+ *refs = append(*refs, r)
+ seen[r.Name()] = true
}
+ return true
}
- return nil
}
-func (d *DotGit) addRefsFromPackedRefsFile(refs *[]*plumbing.Reference, f billy.File, seen map[plumbing.ReferenceName]bool) (err error) {
- packedRefs, err := d.findPackedRefsInFile(f)
- if err != nil {
- return err
- }
+func (d *DotGit) addRefsFromPackedRefs(refs *[]*plumbing.Reference, seen map[plumbing.ReferenceName]bool) (err error) {
+ return d.findPackedRefs(refsRecvFunc(refs, seen))
+}
- for _, ref := range packedRefs {
- if !seen[ref.Name()] {
- *refs = append(*refs, ref)
- seen[ref.Name()] = true
- }
- }
- return nil
+func (d *DotGit) addRefsFromPackedRefsFile(refs *[]*plumbing.Reference, f billy.File, seen map[plumbing.ReferenceName]bool) (err error) {
+ return d.findPackedRefsInFile(f, refsRecvFunc(refs, seen))
}
func (d *DotGit) openAndLockPackedRefs(doCreate bool) (
@@ -943,6 +942,7 @@ func (d *DotGit) walkReferencesTree(refs *[]*plumbing.Reference, relPath []strin
files, err := d.fs.ReadDir(d.fs.Join(relPath...))
if err != nil {
if os.IsNotExist(err) {
+ // a race happened, and our directory is gone now
return nil
}
@@ -960,6 +960,10 @@ func (d *DotGit) walkReferencesTree(refs *[]*plumbing.Reference, relPath []strin
}
ref, err := d.readReferenceFile(".", strings.Join(newRelPath, "/"))
+ if os.IsNotExist(err) {
+ // a race happened, and our file is gone now
+ continue
+ }
if err != nil {
return err
}
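
The refsRecv callback turns packed-refs parsing into a streaming scan that can stop as soon as the wanted reference is found. A minimal standalone sketch of that callback-with-early-stop pattern (hypothetical helper names, not the unexported DotGit methods):

package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

// recvFunc mirrors the refsRecv idea: return true to keep scanning,
// false to stop once the caller has what it needs.
type recvFunc func(name, hash string) bool

// scanPackedRefs streams "hash name" lines to recv and stops early
// when recv returns false.
func scanPackedRefs(r io.Reader, recv recvFunc) error {
	s := bufio.NewScanner(r)
	for s.Scan() {
		line := s.Text()
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		fields := strings.SplitN(line, " ", 2)
		if len(fields) != 2 {
			continue
		}
		if !recv(fields[1], fields[0]) {
			return nil // early stop, no need to parse the rest
		}
	}
	return s.Err()
}

func main() {
	data := "# pack-refs with: peeled fully-peeled\n" +
		"6ecf0ef2c2dffb796033e5a02219af86ec6584e5 refs/heads/master\n" +
		"e8d3ffab552895c19b9fcf7aa264d277cde33881 refs/heads/branch\n"

	var found string
	_ = scanPackedRefs(strings.NewReader(data), func(name, hash string) bool {
		if name == "refs/heads/branch" {
			found = hash
			return false // stop scanning once the single ref is resolved
		}
		return true
	})
	fmt.Println(found)
}
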
diff --git a/storage/filesystem/dotgit/dotgit_test.go b/storage/filesystem/dotgit/dotgit_test.go
index a8f0eb7..1b6c113 100644
--- a/storage/filesystem/dotgit/dotgit_test.go
+++ b/storage/filesystem/dotgit/dotgit_test.go
@@ -4,7 +4,6 @@ import (
"bufio"
"encoding/hex"
"io"
- "io/ioutil"
"os"
"path/filepath"
"runtime"
@@ -374,7 +373,7 @@ func (s *SuiteDotGit) TestConfigWriteAndConfig(c *C) {
f, err = dir.Config()
c.Assert(err, IsNil)
- cnt, err := ioutil.ReadAll(f)
+ cnt, err := io.ReadAll(f)
c.Assert(err, IsNil)
c.Assert(string(cnt), Equals, "foo")
@@ -404,7 +403,7 @@ func (s *SuiteDotGit) TestIndexWriteAndIndex(c *C) {
f, err = dir.Index()
c.Assert(err, IsNil)
- cnt, err := ioutil.ReadAll(f)
+ cnt, err := io.ReadAll(f)
c.Assert(err, IsNil)
c.Assert(string(cnt), Equals, "foo")
@@ -434,7 +433,7 @@ func (s *SuiteDotGit) TestShallowWriteAndShallow(c *C) {
f, err = dir.Shallow()
c.Assert(err, IsNil)
- cnt, err := ioutil.ReadAll(f)
+ cnt, err := io.ReadAll(f)
c.Assert(err, IsNil)
c.Assert(string(cnt), Equals, "foo")
@@ -649,6 +648,27 @@ func (s *SuiteDotGit) TestObject(c *C) {
Equals, true,
)
incomingHash := "9d25e0f9bde9f82882b49fe29117b9411cb157b7" //made up hash
+ incomingDirPath := fs.Join("objects", "tmp_objdir-incoming-123456")
+ incomingFilePath := fs.Join(incomingDirPath, incomingHash[0:2], incomingHash[2:40])
+ fs.MkdirAll(incomingDirPath, os.FileMode(0755))
+ fs.Create(incomingFilePath)
+
+ _, err = dir.Object(plumbing.NewHash(incomingHash))
+ c.Assert(err, IsNil)
+}
+
+func (s *SuiteDotGit) TestPreGit235Object(c *C) {
+ fs := fixtures.ByTag(".git").ByTag("unpacked").One().DotGit()
+ dir := New(fs)
+
+ hash := plumbing.NewHash("03db8e1fbe133a480f2867aac478fd866686d69e")
+ file, err := dir.Object(hash)
+ c.Assert(err, IsNil)
+ c.Assert(strings.HasSuffix(
+ file.Name(), fs.Join("objects", "03", "db8e1fbe133a480f2867aac478fd866686d69e")),
+ Equals, true,
+ )
+ incomingHash := "9d25e0f9bde9f82882b49fe29117b9411cb157b7" //made up hash
incomingDirPath := fs.Join("objects", "incoming-123456")
incomingFilePath := fs.Join(incomingDirPath, incomingHash[0:2], incomingHash[2:40])
fs.MkdirAll(incomingDirPath, os.FileMode(0755))
@@ -666,7 +686,7 @@ func (s *SuiteDotGit) TestObjectStat(c *C) {
_, err := dir.ObjectStat(hash)
c.Assert(err, IsNil)
incomingHash := "9d25e0f9bde9f82882b49fe29117b9411cb157b7" //made up hash
- incomingDirPath := fs.Join("objects", "incoming-123456")
+ incomingDirPath := fs.Join("objects", "tmp_objdir-incoming-123456")
incomingFilePath := fs.Join(incomingDirPath, incomingHash[0:2], incomingHash[2:40])
fs.MkdirAll(incomingDirPath, os.FileMode(0755))
fs.Create(incomingFilePath)
@@ -684,7 +704,7 @@ func (s *SuiteDotGit) TestObjectDelete(c *C) {
c.Assert(err, IsNil)
incomingHash := "9d25e0f9bde9f82882b49fe29117b9411cb157b7" //made up hash
- incomingDirPath := fs.Join("objects", "incoming-123456")
+ incomingDirPath := fs.Join("objects", "tmp_objdir-incoming-123456")
incomingSubDirPath := fs.Join(incomingDirPath, incomingHash[0:2])
incomingFilePath := fs.Join(incomingSubDirPath, incomingHash[2:40])
@@ -864,3 +884,71 @@ func (s *SuiteDotGit) TestIncBytes(c *C) {
c.Assert(overflow, Equals, test.overflow)
}
}
+
+// this filesystem wrapper returns os.ErrNotExist if the file matches
+// the provided paths list
+type notExistsFS struct {
+ billy.Filesystem
+
+ paths []string
+}
+
+func (f *notExistsFS) matches(path string) bool {
+ p := filepath.ToSlash(path)
+ for _, n := range f.paths {
+ if p == n {
+ return true
+ }
+ }
+ return false
+}
+
+func (f *notExistsFS) Open(filename string) (billy.File, error) {
+ if f.matches(filename) {
+ return nil, os.ErrNotExist
+ }
+
+ return f.Filesystem.Open(filename)
+}
+
+func (f *notExistsFS) ReadDir(path string) ([]os.FileInfo, error) {
+ if f.matches(path) {
+ return nil, os.ErrNotExist
+ }
+
+ return f.Filesystem.ReadDir(path)
+}
+
+func (s *SuiteDotGit) TestDeletedRefs(c *C) {
+ fs, clean := s.TemporalFilesystem()
+ defer clean()
+
+ dir := New(&notExistsFS{
+ Filesystem: fs,
+ paths: []string{
+ "refs/heads/bar",
+ "refs/heads/baz",
+ },
+ })
+
+ err := dir.SetRef(plumbing.NewReferenceFromStrings(
+ "refs/heads/foo",
+ "e8d3ffab552895c19b9fcf7aa264d277cde33881",
+ ), nil)
+ c.Assert(err, IsNil)
+ err = dir.SetRef(plumbing.NewReferenceFromStrings(
+ "refs/heads/bar",
+ "a8d3ffab552895c19b9fcf7aa264d277cde33881",
+ ), nil)
+ c.Assert(err, IsNil)
+ err = dir.SetRef(plumbing.NewReferenceFromStrings(
+ "refs/heads/baz/baz",
+ "a8d3ffab552895c19b9fcf7aa264d277cde33881",
+ ), nil)
+ c.Assert(err, IsNil)
+
+ refs, err := dir.Refs()
+ c.Assert(err, IsNil)
+ c.Assert(refs, HasLen, 1)
+ c.Assert(refs[0].Name(), Equals, plumbing.ReferenceName("refs/heads/foo"))
+}
diff --git a/storage/filesystem/dotgit/writers.go b/storage/filesystem/dotgit/writers.go
index e2ede93..849b7a1 100644
--- a/storage/filesystem/dotgit/writers.go
+++ b/storage/filesystem/dotgit/writers.go
@@ -9,6 +9,7 @@ import (
"github.com/go-git/go-git/v5/plumbing/format/idxfile"
"github.com/go-git/go-git/v5/plumbing/format/objfile"
"github.com/go-git/go-git/v5/plumbing/format/packfile"
+ "github.com/go-git/go-git/v5/plumbing/hash"
"github.com/go-git/go-billy/v5"
)
@@ -277,8 +278,8 @@ func (w *ObjectWriter) Close() error {
}
func (w *ObjectWriter) save() error {
- hash := w.Hash().String()
- file := w.fs.Join(objectsPath, hash[0:2], hash[2:40])
+ hex := w.Hash().String()
+ file := w.fs.Join(objectsPath, hex[0:2], hex[2:hash.HexSize])
return w.fs.Rename(w.f.Name(), file)
}
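
ObjectWriter.save, like objectPath earlier, now slices the hex string with hash.HexSize instead of a hard-coded 40, so the loose-object layout also works for SHA-256 hex lengths. A small sketch of that layout, with a local constant standing in for hash.HexSize:

package main

import (
	"fmt"
	"path/filepath"
)

// hexSize stands in for hash.HexSize: 40 for SHA-1, 64 for SHA-256.
const hexSize = 40

// objectPath fans a loose object out as objects/<first two hex chars>/<rest>.
func objectPath(hex string) string {
	return filepath.Join("objects", hex[0:2], hex[2:hexSize])
}

func main() {
	// e.g. objects/03/db8e1fbe133a480f2867aac478fd866686d69e (separator depends on the OS)
	fmt.Println(objectPath("03db8e1fbe133a480f2867aac478fd866686d69e"))
}
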
diff --git a/storage/filesystem/object.go b/storage/filesystem/object.go
index 5c91bcd..846a7b8 100644
--- a/storage/filesystem/object.go
+++ b/storage/filesystem/object.go
@@ -4,6 +4,7 @@ import (
"bytes"
"io"
"os"
+ "sync"
"time"
"github.com/go-git/go-git/v5/plumbing"
@@ -419,10 +420,21 @@ func (s *ObjectStorage) getFromUnpacked(h plumbing.Hash) (obj plumbing.EncodedOb
s.objectCache.Put(obj)
- _, err = io.Copy(w, r)
+ bufp := copyBufferPool.Get().(*[]byte)
+ buf := *bufp
+ _, err = io.CopyBuffer(w, r, buf)
+ copyBufferPool.Put(bufp)
+
return obj, err
}
+var copyBufferPool = sync.Pool{
+ New: func() interface{} {
+ b := make([]byte, 32*1024)
+ return &b
+ },
+}
+
// Get returns the object with the given hash, by searching for it in
// the packfile.
func (s *ObjectStorage) getFromPackfile(h plumbing.Hash, canBeDelta bool) (
@@ -525,14 +537,21 @@ func (s *ObjectStorage) findObjectInPackfile(h plumbing.Hash) (plumbing.Hash, pl
return plumbing.ZeroHash, plumbing.ZeroHash, -1
}
+// HashesWithPrefix returns all objects with a hash that starts with a prefix by searching for
+// them in the packfile and the git object directories.
func (s *ObjectStorage) HashesWithPrefix(prefix []byte) ([]plumbing.Hash, error) {
hashes, err := s.dir.ObjectsWithPrefix(prefix)
if err != nil {
return nil, err
}
+ seen := hashListAsMap(hashes)
+
// TODO: This could be faster with some idxfile changes,
// or diving into the packfile.
+ if err := s.requireIndex(); err != nil {
+ return nil, err
+ }
for _, index := range s.index {
ei, err := index.Entries()
if err != nil {
@@ -546,6 +565,9 @@ func (s *ObjectStorage) HashesWithPrefix(prefix []byte) ([]plumbing.Hash, error)
return nil, err
}
if bytes.HasPrefix(e.Hash[:], prefix) {
+ if _, ok := seen[e.Hash]; ok {
+ continue
+ }
hashes = append(hashes, e.Hash)
}
}
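
getFromUnpacked now borrows a pooled 32 KiB buffer and uses io.CopyBuffer rather than letting io.Copy allocate a fresh buffer per object. The pattern in isolation, as a minimal standalone sketch:

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
	"sync"
)

// A pool of *[]byte avoids an allocation per copy and stores a pointer,
// keeping the slice header itself off the heap.
var copyBufferPool = sync.Pool{
	New: func() interface{} {
		b := make([]byte, 32*1024)
		return &b
	},
}

// copyWithPooledBuffer copies src to dst using a borrowed scratch buffer.
func copyWithPooledBuffer(dst io.Writer, src io.Reader) (int64, error) {
	bufp := copyBufferPool.Get().(*[]byte)
	defer copyBufferPool.Put(bufp)
	return io.CopyBuffer(dst, src, *bufp)
}

func main() {
	var out bytes.Buffer
	n, err := copyWithPooledBuffer(&out, strings.NewReader("object payload"))
	fmt.Println(n, err, out.String())
}
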
diff --git a/storage/filesystem/object_test.go b/storage/filesystem/object_test.go
index 19a7914..251077a 100644
--- a/storage/filesystem/object_test.go
+++ b/storage/filesystem/object_test.go
@@ -4,7 +4,6 @@ import (
"encoding/hex"
"fmt"
"io"
- "io/ioutil"
"os"
"path/filepath"
"testing"
@@ -406,6 +405,21 @@ func (s *FsSuite) TestHashesWithPrefix(c *C) {
c.Assert(hashes[0].String(), Equals, "f3dfe29d268303fc6e1bbce268605fc99573406e")
}
+func (s *FsSuite) TestHashesWithPrefixFromPackfile(c *C) {
+ // Same setup as TestGetFromPackfile
+ fixtures.Basic().ByTag(".git").Test(c, func(f *fixtures.Fixture) {
+ fs := f.DotGit()
+ o := NewObjectStorage(dotgit.New(fs), cache.NewObjectLRUDefault())
+
+ expected := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
+ // Only pass the first 8 bytes
+ hashes, err := o.HashesWithPrefix(expected[:8])
+ c.Assert(err, IsNil)
+ c.Assert(hashes, HasLen, 1)
+ c.Assert(hashes[0], Equals, expected)
+ })
+}
+
func BenchmarkPackfileIter(b *testing.B) {
defer fixtures.Clean()
@@ -495,7 +509,7 @@ func BenchmarkPackfileIterReadContent(b *testing.B) {
b.Fatal(err)
}
- if _, err := ioutil.ReadAll(r); err != nil {
+ if _, err := io.ReadAll(r); err != nil {
b.Fatal(err)
}
diff --git a/storage/filesystem/shallow.go b/storage/filesystem/shallow.go
index afb600c..ac48fdf 100644
--- a/storage/filesystem/shallow.go
+++ b/storage/filesystem/shallow.go
@@ -34,7 +34,7 @@ func (s *ShallowStorage) SetShallow(commits []plumbing.Hash) error {
return err
}
-// Shallow return the shallow commits reading from shallo file from .git
+// Shallow returns the shallow commits read from the shallow file in .git
func (s *ShallowStorage) Shallow() ([]plumbing.Hash, error) {
f, err := s.dir.Shallow()
if f == nil || err != nil {
diff --git a/storage/test/storage_suite.go b/storage/test/storage_suite.go
index 2c00e75..ee67fc7 100644
--- a/storage/test/storage_suite.go
+++ b/storage/test/storage_suite.go
@@ -5,7 +5,6 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing"
@@ -502,12 +501,12 @@ func objectEquals(a plumbing.EncodedObject, b plumbing.EncodedObject) error {
return fmt.Errorf("can't get reader on b: %q", err)
}
- ca, err := ioutil.ReadAll(ra)
+ ca, err := io.ReadAll(ra)
if err != nil {
return fmt.Errorf("error reading a: %q", err)
}
- cb, err := ioutil.ReadAll(rb)
+ cb, err := io.ReadAll(rb)
if err != nil {
return fmt.Errorf("error reading b: %q", err)
}
diff --git a/submodule.go b/submodule.go
index a202a9b..84f020d 100644
--- a/submodule.go
+++ b/submodule.go
@@ -5,13 +5,13 @@ import (
"context"
"errors"
"fmt"
- "net/url"
"path"
"github.com/go-git/go-billy/v5"
"github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/format/index"
+ "github.com/go-git/go-git/v5/plumbing/transport"
)
var (
@@ -133,29 +133,29 @@ func (s *Submodule) Repository() (*Repository, error) {
return nil, err
}
- moduleURL, err := url.Parse(s.c.URL)
+ moduleEndpoint, err := transport.NewEndpoint(s.c.URL)
if err != nil {
return nil, err
}
- if !path.IsAbs(moduleURL.Path) {
+ if !path.IsAbs(moduleEndpoint.Path) && moduleEndpoint.Protocol == "file" {
remotes, err := s.w.r.Remotes()
if err != nil {
return nil, err
}
- rootURL, err := url.Parse(remotes[0].c.URLs[0])
+ rootEndpoint, err := transport.NewEndpoint(remotes[0].c.URLs[0])
if err != nil {
return nil, err
}
- rootURL.Path = path.Join(rootURL.Path, moduleURL.Path)
- *moduleURL = *rootURL
+ rootEndpoint.Path = path.Join(rootEndpoint.Path, moduleEndpoint.Path)
+ *moduleEndpoint = *rootEndpoint
}
_, err = r.CreateRemote(&config.RemoteConfig{
Name: DefaultRemoteName,
- URLs: []string{moduleURL.String()},
+ URLs: []string{moduleEndpoint.String()},
})
return r, err
@@ -243,7 +243,7 @@ func (s *Submodule) fetchAndCheckout(
ctx context.Context, r *Repository, o *SubmoduleUpdateOptions, hash plumbing.Hash,
) error {
if !o.NoFetch {
- err := r.FetchContext(ctx, &FetchOptions{Auth: o.Auth})
+ err := r.FetchContext(ctx, &FetchOptions{Auth: o.Auth, Depth: o.Depth})
if err != nil && err != NoErrAlreadyUpToDate {
return err
}
@@ -265,6 +265,7 @@ func (s *Submodule) fetchAndCheckout(
err := r.FetchContext(ctx, &FetchOptions{
Auth: o.Auth,
RefSpecs: []config.RefSpec{refSpec},
+ Depth: o.Depth,
})
if err != nil && err != NoErrAlreadyUpToDate && err != ErrExactSHA1NotSupported {
return err
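
Parsing the submodule URL with transport.NewEndpoint instead of net/url means scp-like URLs (git@host:path) resolve, while relative file-protocol paths are still joined onto the parent remote's URL. A hedged sketch of that resolution logic as a free function (the helper name and the example URLs are made up):

package main

import (
	"fmt"
	"path"

	"github.com/go-git/go-git/v5/plumbing/transport"
)

// resolveSubmoduleURL mirrors the logic above: only relative,
// file-protocol module URLs are joined onto the parent remote URL.
func resolveSubmoduleURL(rootURL, moduleURL string) (string, error) {
	moduleEndpoint, err := transport.NewEndpoint(moduleURL)
	if err != nil {
		return "", err
	}
	if !path.IsAbs(moduleEndpoint.Path) && moduleEndpoint.Protocol == "file" {
		rootEndpoint, err := transport.NewEndpoint(rootURL)
		if err != nil {
			return "", err
		}
		rootEndpoint.Path = path.Join(rootEndpoint.Path, moduleEndpoint.Path)
		return rootEndpoint.String(), nil
	}
	return moduleEndpoint.String(), nil
}

func main() {
	// scp-like URLs now parse and are used as-is.
	u, err := resolveSubmoduleURL("https://example.com/org/parent.git",
		"git@github.com:username/submodule_repo")
	fmt.Println(u, err)
}
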
diff --git a/submodule_test.go b/submodule_test.go
index 4bae544..0e88391 100644
--- a/submodule_test.go
+++ b/submodule_test.go
@@ -2,11 +2,13 @@ package git
import (
"context"
- "os"
"path/filepath"
"testing"
+ "github.com/go-git/go-billy/v5/memfs"
+ "github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/storage/memory"
fixtures "github.com/go-git/go-git-fixtures/v4"
. "gopkg.in/check.v1"
@@ -15,7 +17,7 @@ import (
type SubmoduleSuite struct {
BaseSuite
Worktree *Worktree
- path string
+ clean func()
}
var _ = Suite(&SubmoduleSuite{})
@@ -23,8 +25,8 @@ var _ = Suite(&SubmoduleSuite{})
func (s *SubmoduleSuite) SetUpTest(c *C) {
path := fixtures.ByTag("submodule").One().Worktree().Root()
- dir, clean := s.TemporalDir()
- defer clean()
+ var dir string
+ dir, s.clean = s.TemporalDir()
r, err := PlainClone(filepath.Join(dir, "worktree"), false, &CloneOptions{
URL: path,
@@ -35,13 +37,10 @@ func (s *SubmoduleSuite) SetUpTest(c *C) {
s.Repository = r
s.Worktree, err = r.Worktree()
c.Assert(err, IsNil)
-
- s.path = dir
}
-func (s *SubmoduleSuite) TearDownTest(c *C) {
- err := os.RemoveAll(s.path)
- c.Assert(err, IsNil)
+func (s *SubmoduleSuite) TearDownTest(_ *C) {
+ s.clean()
}
func (s *SubmoduleSuite) TestInit(c *C) {
@@ -198,7 +197,7 @@ func (s *SubmoduleSuite) TestSubmodulesInit(c *C) {
func (s *SubmoduleSuite) TestGitSubmodulesSymlink(c *C) {
f, err := s.Worktree.Filesystem.Create("badfile")
c.Assert(err, IsNil)
- defer f.Close()
+ defer func() { _ = f.Close() }()
err = s.Worktree.Filesystem.Remove(gitmodulesFile)
c.Assert(err, IsNil)
@@ -233,3 +232,55 @@ func (s *SubmoduleSuite) TestSubmodulesUpdateContext(c *C) {
err = sm.UpdateContext(ctx, &SubmoduleUpdateOptions{Init: true})
c.Assert(err, NotNil)
}
+
+func (s *SubmoduleSuite) TestSubmodulesFetchDepth(c *C) {
+ if testing.Short() {
+ c.Skip("skipping test in short mode.")
+ }
+
+ sm, err := s.Worktree.Submodule("basic")
+ c.Assert(err, IsNil)
+
+ err = sm.Update(&SubmoduleUpdateOptions{
+ Init: true,
+ Depth: 1,
+ })
+ c.Assert(err, IsNil)
+
+ r, err := sm.Repository()
+ c.Assert(err, IsNil)
+
+ lr, err := r.Log(&LogOptions{})
+ c.Assert(err, IsNil)
+
+ commitCount := 0
+ for _, err := lr.Next(); err == nil; _, err = lr.Next() {
+ commitCount++
+ }
+ c.Assert(err, IsNil)
+
+ c.Assert(commitCount, Equals, 1)
+}
+
+func (s *SubmoduleSuite) TestSubmoduleParseScp(c *C) {
+ repo := &Repository{
+ Storer: memory.NewStorage(),
+ wt: memfs.New(),
+ }
+ worktree := &Worktree{
+ Filesystem: memfs.New(),
+ r: repo,
+ }
+ submodule := &Submodule{
+ initialized: true,
+ c: nil,
+ w: worktree,
+ }
+
+ submodule.c = &config.Submodule{
+ URL: "git@github.com:username/submodule_repo",
+ }
+
+ _, err := submodule.Repository()
+ c.Assert(err, IsNil)
+}
diff --git a/utils/ioutil/common_test.go b/utils/ioutil/common_test.go
index 27bfa62..e3c9d69 100644
--- a/utils/ioutil/common_test.go
+++ b/utils/ioutil/common_test.go
@@ -3,7 +3,7 @@ package ioutil
import (
"bytes"
"context"
- "io/ioutil"
+ "io"
"strings"
"testing"
@@ -38,7 +38,7 @@ func (s *CommonSuite) TestNonEmptyReader_NonEmpty(c *C) {
c.Assert(err, IsNil)
c.Assert(r, NotNil)
- read, err := ioutil.ReadAll(r)
+ read, err := io.ReadAll(r)
c.Assert(err, IsNil)
c.Assert(string(read), Equals, "1")
}
@@ -48,7 +48,7 @@ func (s *CommonSuite) TestNewReadCloser(c *C) {
closer := &closer{}
r := NewReadCloser(buf, closer)
- read, err := ioutil.ReadAll(r)
+ read, err := io.ReadAll(r)
c.Assert(err, IsNil)
c.Assert(string(read), Equals, "1")
@@ -160,7 +160,7 @@ func ExampleCheckClose() {
// CheckClose is commonly used with named return values
f := func() (err error) {
// Get a io.ReadCloser
- r := ioutil.NopCloser(strings.NewReader("foo"))
+ r := io.NopCloser(strings.NewReader("foo"))
// defer CheckClose call with an io.Closer and pointer to error
defer CheckClose(r, &err)
diff --git a/utils/sync/bufio.go b/utils/sync/bufio.go
new file mode 100644
index 0000000..5009ea8
--- /dev/null
+++ b/utils/sync/bufio.go
@@ -0,0 +1,29 @@
+package sync
+
+import (
+ "bufio"
+ "io"
+ "sync"
+)
+
+var bufioReader = sync.Pool{
+ New: func() interface{} {
+ return bufio.NewReader(nil)
+ },
+}
+
+// GetBufioReader returns a *bufio.Reader that is managed by a sync.Pool.
+// Returns a bufio.Reader that has been reset with reader and is ready for use.
+//
+// After use, the *bufio.Reader should be put back into the sync.Pool
+// by calling PutBufioReader.
+func GetBufioReader(reader io.Reader) *bufio.Reader {
+ r := bufioReader.Get().(*bufio.Reader)
+ r.Reset(reader)
+ return r
+}
+
+// PutBufioReader puts reader back into its sync.Pool.
+func PutBufioReader(reader *bufio.Reader) {
+ bufioReader.Put(reader)
+}
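
A short usage sketch for the new pooled bufio.Reader helpers (the object-header input is made up):

package main

import (
	"fmt"
	"strings"

	"github.com/go-git/go-git/v5/utils/sync"
)

func main() {
	// Borrow a reader from the pool, already reset to the given source.
	br := sync.GetBufioReader(strings.NewReader("tree 84\x00..."))

	// Read up to the first NUL byte, as an object-header parser would.
	header, err := br.ReadString(0)
	fmt.Printf("%q %v\n", header, err)

	// Return the reader so later callers can reuse the allocation.
	sync.PutBufioReader(br)
}
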
diff --git a/utils/sync/bufio_test.go b/utils/sync/bufio_test.go
new file mode 100644
index 0000000..e70f3d8
--- /dev/null
+++ b/utils/sync/bufio_test.go
@@ -0,0 +1,26 @@
+package sync
+
+import (
+ "io"
+ "strings"
+ "testing"
+)
+
+func TestGetAndPutBufioReader(t *testing.T) {
+ wanted := "someinput"
+ r := GetBufioReader(strings.NewReader(wanted))
+ if r == nil {
+ t.Error("nil was not expected")
+ }
+
+ got, err := r.ReadString(0)
+ if err != nil && err != io.EOF {
+ t.Errorf("unexpected error reading string: %v", err)
+ }
+
+ if wanted != got {
+ t.Errorf("wanted %q got %q", wanted, got)
+ }
+
+ PutBufioReader(r)
+}
diff --git a/utils/sync/bytes.go b/utils/sync/bytes.go
new file mode 100644
index 0000000..dd06fc0
--- /dev/null
+++ b/utils/sync/bytes.go
@@ -0,0 +1,51 @@
+package sync
+
+import (
+ "bytes"
+ "sync"
+)
+
+var (
+ byteSlice = sync.Pool{
+ New: func() interface{} {
+ b := make([]byte, 16*1024)
+ return &b
+ },
+ }
+ bytesBuffer = sync.Pool{
+ New: func() interface{} {
+ return bytes.NewBuffer(nil)
+ },
+ }
+)
+
+// GetByteSlice returns a *[]byte that is managed by a sync.Pool.
+// The initial slice length will be 16384 (16kb).
+//
+// After use, the *[]byte should be put back into the sync.Pool
+// by calling PutByteSlice.
+func GetByteSlice() *[]byte {
+ buf := byteSlice.Get().(*[]byte)
+ return buf
+}
+
+// PutByteSlice puts buf back into its sync.Pool.
+func PutByteSlice(buf *[]byte) {
+ byteSlice.Put(buf)
+}
+
+// GetBytesBuffer returns a *bytes.Buffer that is managed by a sync.Pool.
+// Returns a buffer that has been reset and is ready for use.
+//
+// After use, the *bytes.Buffer should be put back into the sync.Pool
+// by calling PutBytesBuffer.
+func GetBytesBuffer() *bytes.Buffer {
+ buf := bytesBuffer.Get().(*bytes.Buffer)
+ buf.Reset()
+ return buf
+}
+
+// PutBytesBuffer puts buf back into its sync.Pool.
+func PutBytesBuffer(buf *bytes.Buffer) {
+ bytesBuffer.Put(buf)
+}
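
The byte-oriented helpers follow the same borrow-and-return pattern; a short sketch combining them with io.CopyBuffer, the way the checkout code does:

package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/go-git/go-git/v5/utils/sync"
)

func main() {
	// Pooled bytes.Buffer, already reset.
	buf := sync.GetBytesBuffer()
	defer sync.PutBytesBuffer(buf)

	// Pooled 16 KiB scratch slice for io.CopyBuffer.
	scratch := sync.GetByteSlice()
	defer sync.PutByteSlice(scratch)

	if _, err := io.CopyBuffer(buf, strings.NewReader("payload"), *scratch); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(buf.String(), len(*scratch))
}
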
diff --git a/utils/sync/bytes_test.go b/utils/sync/bytes_test.go
new file mode 100644
index 0000000..b233429
--- /dev/null
+++ b/utils/sync/bytes_test.go
@@ -0,0 +1,49 @@
+package sync
+
+import (
+ "testing"
+)
+
+func TestGetAndPutBytesBuffer(t *testing.T) {
+ buf := GetBytesBuffer()
+ if buf == nil {
+ t.Error("nil was not expected")
+ }
+
+ initialLen := buf.Len()
+ buf.Grow(initialLen * 2)
+ grownLen := buf.Len()
+
+ PutBytesBuffer(buf)
+
+ buf = GetBytesBuffer()
+ if buf.Len() != grownLen {
+ t.Error("bytes buffer was not reused")
+ }
+
+ buf2 := GetBytesBuffer()
+ if buf2.Len() != initialLen {
+ t.Errorf("new bytes buffer length: wanted %d got %d", initialLen, buf2.Len())
+ }
+}
+
+func TestGetAndPutByteSlice(t *testing.T) {
+ slice := GetByteSlice()
+ if slice == nil {
+ t.Error("nil was not expected")
+ }
+
+ wanted := 16 * 1024
+ got := len(*slice)
+ if wanted != got {
+ t.Errorf("byte slice length: wanted %d got %d", wanted, got)
+ }
+
+ newByteSlice := make([]byte, wanted*2)
+ PutByteSlice(&newByteSlice)
+
+ newSlice := GetByteSlice()
+ if len(*newSlice) != len(newByteSlice) {
+ t.Error("byte slice was not reused")
+ }
+}
diff --git a/utils/sync/zlib.go b/utils/sync/zlib.go
new file mode 100644
index 0000000..c613885
--- /dev/null
+++ b/utils/sync/zlib.go
@@ -0,0 +1,74 @@
+package sync
+
+import (
+ "bytes"
+ "compress/zlib"
+ "io"
+ "sync"
+)
+
+var (
+ zlibInitBytes = []byte{0x78, 0x9c, 0x01, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0x00, 0x01}
+ zlibReader = sync.Pool{
+ New: func() interface{} {
+ r, _ := zlib.NewReader(bytes.NewReader(zlibInitBytes))
+ return ZLibReader{
+ Reader: r.(zlibReadCloser),
+ }
+ },
+ }
+ zlibWriter = sync.Pool{
+ New: func() interface{} {
+ return zlib.NewWriter(nil)
+ },
+ }
+)
+
+type zlibReadCloser interface {
+ io.ReadCloser
+ zlib.Resetter
+}
+
+type ZLibReader struct {
+ dict *[]byte
+ Reader zlibReadCloser
+}
+
+// GetZlibReader returns a ZLibReader that is managed by a sync.Pool.
+// Returns a ZLibReader that has been reset using a dictionary that is
+// also managed by a sync.Pool.
+//
+// After use, the ZLibReader should be put back into the sync.Pool
+// by calling PutZlibReader.
+func GetZlibReader(r io.Reader) (ZLibReader, error) {
+ z := zlibReader.Get().(ZLibReader)
+ z.dict = GetByteSlice()
+
+ err := z.Reader.Reset(r, *z.dict)
+
+ return z, err
+}
+
+// PutZlibReader puts z back into its sync.Pool, first closing the reader.
+// The Byte slice dictionary is also put back into its sync.Pool.
+func PutZlibReader(z ZLibReader) {
+ z.Reader.Close()
+ PutByteSlice(z.dict)
+ zlibReader.Put(z)
+}
+
+// GetZlibWriter returns a *zlib.Writer that is managed by a sync.Pool.
+// Returns a writer that has been reset with w and is ready for use.
+//
+// After use, the *zlib.Writer should be put back into the sync.Pool
+// by calling PutZlibWriter.
+func GetZlibWriter(w io.Writer) *zlib.Writer {
+ z := zlibWriter.Get().(*zlib.Writer)
+ z.Reset(w)
+ return z
+}
+
+// PutZlibWriter puts w back into its sync.Pool.
+func PutZlibWriter(w *zlib.Writer) {
+ zlibWriter.Put(w)
+}
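
Usage sketch for the pooled zlib reader and writer; note that PutZlibReader closes the reader and also returns its dictionary slice to the byte-slice pool:

package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/go-git/go-git/v5/utils/sync"
)

func main() {
	// Compress with a pooled writer.
	var compressed bytes.Buffer
	zw := sync.GetZlibWriter(&compressed)
	_, _ = zw.Write([]byte("loose object content"))
	_ = zw.Close()
	sync.PutZlibWriter(zw)

	// Decompress with a pooled reader; Close and return happen in PutZlibReader.
	zr, err := sync.GetZlibReader(&compressed)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer sync.PutZlibReader(zr)

	out, err := io.ReadAll(zr.Reader)
	fmt.Println(string(out), err)
}
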
diff --git a/utils/sync/zlib_test.go b/utils/sync/zlib_test.go
new file mode 100644
index 0000000..b736fb2
--- /dev/null
+++ b/utils/sync/zlib_test.go
@@ -0,0 +1,74 @@
+package sync
+
+import (
+ "bytes"
+ "compress/zlib"
+ "io"
+ "testing"
+)
+
+func TestGetAndPutZlibReader(t *testing.T) {
+ _, err := GetZlibReader(bytes.NewReader(zlibInitBytes))
+ if err != nil {
+ t.Errorf("unexpected error: %v", err)
+ }
+
+ dict := &[]byte{}
+ reader := FakeZLibReader{}
+ PutZlibReader(ZLibReader{dict: dict, Reader: &reader})
+
+ if !reader.wasClosed {
+ t.Errorf("reader was not closed")
+ }
+
+ z2, err := GetZlibReader(bytes.NewReader(zlibInitBytes))
+ if err != nil {
+ t.Errorf("unexpected error: %v", err)
+ }
+
+ if dict != z2.dict {
+ t.Errorf("zlib dictionary was not reused")
+ }
+
+ if &reader != z2.Reader {
+ t.Errorf("zlib reader was not reused")
+ }
+
+ if !reader.wasReset {
+ t.Errorf("reader was not reset")
+ }
+}
+
+func TestGetAndPutZlibWriter(t *testing.T) {
+ w := GetZlibWriter(nil)
+ if w == nil {
+ t.Errorf("nil was not expected")
+ }
+
+ newW := zlib.NewWriter(nil)
+ PutZlibWriter(newW)
+
+ w2 := GetZlibWriter(nil)
+ if w2 != newW {
+ t.Errorf("zlib writer was not reused")
+ }
+}
+
+type FakeZLibReader struct {
+ wasClosed bool
+ wasReset bool
+}
+
+func (f *FakeZLibReader) Reset(r io.Reader, dict []byte) error {
+ f.wasReset = true
+ return nil
+}
+
+func (f *FakeZLibReader) Read(p []byte) (n int, err error) {
+ return 0, nil
+}
+
+func (f *FakeZLibReader) Close() error {
+ f.wasClosed = true
+ return nil
+}
diff --git a/worktree.go b/worktree.go
index c974aed..595dcea 100644
--- a/worktree.go
+++ b/worktree.go
@@ -5,11 +5,9 @@ import (
"errors"
"fmt"
"io"
- stdioutil "io/ioutil"
"os"
"path/filepath"
"strings"
- "sync"
"github.com/go-git/go-billy/v5"
"github.com/go-git/go-billy/v5/util"
@@ -22,6 +20,7 @@ import (
"github.com/go-git/go-git/v5/plumbing/storer"
"github.com/go-git/go-git/v5/utils/ioutil"
"github.com/go-git/go-git/v5/utils/merkletrie"
+ "github.com/go-git/go-git/v5/utils/sync"
)
var (
@@ -290,7 +289,7 @@ func (w *Worktree) ResetSparsely(opts *ResetOptions, dirs []string) error {
return nil
}
- t, err := w.getTreeFromCommitHash(opts.Commit)
+ t, err := w.r.getTreeFromCommitHash(opts.Commit)
if err != nil {
return err
}
@@ -410,7 +409,7 @@ func (w *Worktree) checkoutChange(ch merkletrie.Change, t *object.Tree, idx *ind
isSubmodule = e.Mode == filemode.Submodule
case merkletrie.Delete:
- return rmFileAndDirIfEmpty(w.Filesystem, ch.From.String())
+ return rmFileAndDirsIfEmpty(w.Filesystem, ch.From.String())
}
if isSubmodule {
@@ -532,12 +531,6 @@ func (w *Worktree) checkoutChangeRegularFile(name string,
return nil
}
-var copyBufferPool = sync.Pool{
- New: func() interface{} {
- return make([]byte, 32*1024)
- },
-}
-
func (w *Worktree) checkoutFile(f *object.File) (err error) {
mode, err := f.Mode.ToOSFileMode()
if err != nil {
@@ -561,9 +554,9 @@ func (w *Worktree) checkoutFile(f *object.File) (err error) {
}
defer ioutil.CheckClose(to, &err)
- buf := copyBufferPool.Get().([]byte)
- _, err = io.CopyBuffer(to, from, buf)
- copyBufferPool.Put(buf)
+ buf := sync.GetByteSlice()
+ _, err = io.CopyBuffer(to, from, *buf)
+ sync.PutByteSlice(buf)
return
}
@@ -575,7 +568,7 @@ func (w *Worktree) checkoutFileSymlink(f *object.File) (err error) {
defer ioutil.CheckClose(from, &err)
- bytes, err := stdioutil.ReadAll(from)
+ bytes, err := io.ReadAll(from)
if err != nil {
return
}
@@ -639,8 +632,8 @@ func (w *Worktree) addIndexFromFile(name string, h plumbing.Hash, idx *indexBuil
return nil
}
-func (w *Worktree) getTreeFromCommitHash(commit plumbing.Hash) (*object.Tree, error) {
- c, err := w.r.CommitObject(commit)
+func (r *Repository) getTreeFromCommitHash(commit plumbing.Hash) (*object.Tree, error) {
+ c, err := r.CommitObject(commit)
if err != nil {
return nil, err
}
@@ -724,7 +717,7 @@ func (w *Worktree) readGitmodulesFile() (*config.Modules, error) {
}
defer f.Close()
- input, err := stdioutil.ReadAll(f)
+ input, err := io.ReadAll(f)
if err != nil {
return nil, err
}
@@ -784,8 +777,10 @@ func (w *Worktree) doClean(status Status, opts *CleanOptions, dir string, files
}
if opts.Dir && dir != "" {
- return doCleanDirectories(w.Filesystem, dir)
+ _, err := removeDirIfEmpty(w.Filesystem, dir)
+ return err
}
+
return nil
}
@@ -806,9 +801,9 @@ func (gr GrepResult) String() string {
return fmt.Sprintf("%s:%s:%d:%s", gr.TreeName, gr.FileName, gr.LineNumber, gr.Content)
}
-// Grep performs grep on a worktree.
-func (w *Worktree) Grep(opts *GrepOptions) ([]GrepResult, error) {
- if err := opts.Validate(w); err != nil {
+// Grep performs grep on a repository.
+func (r *Repository) Grep(opts *GrepOptions) ([]GrepResult, error) {
+ if err := opts.validate(r); err != nil {
return nil, err
}
@@ -818,7 +813,7 @@ func (w *Worktree) Grep(opts *GrepOptions) ([]GrepResult, error) {
var treeName string
if opts.ReferenceName != "" {
- ref, err := w.r.Reference(opts.ReferenceName, true)
+ ref, err := r.Reference(opts.ReferenceName, true)
if err != nil {
return nil, err
}
@@ -831,7 +826,7 @@ func (w *Worktree) Grep(opts *GrepOptions) ([]GrepResult, error) {
// Obtain a tree from the commit hash and get a tracked files iterator from
// the tree.
- tree, err := w.getTreeFromCommitHash(commitHash)
+ tree, err := r.getTreeFromCommitHash(commitHash)
if err != nil {
return nil, err
}
@@ -840,6 +835,11 @@ func (w *Worktree) Grep(opts *GrepOptions) ([]GrepResult, error) {
return findMatchInFiles(fileiter, treeName, opts)
}
+// Grep performs grep on a worktree.
+func (w *Worktree) Grep(opts *GrepOptions) ([]GrepResult, error) {
+ return w.r.Grep(opts)
+}
+
// findMatchInFiles takes a FileIter, worktree name and GrepOptions, and
// returns a slice of GrepResult containing the result of regex pattern matching
// in content of all the files.
@@ -926,25 +926,52 @@ func findMatchInFile(file *object.File, treeName string, opts *GrepOptions) ([]G
return grepResults, nil
}
-func rmFileAndDirIfEmpty(fs billy.Filesystem, name string) error {
+// rmFileAndDirsIfEmpty removes the named file and then walks up the directory
+// tree, removing every empty directory it encounters, not just the one that
+// contained the file.
+func rmFileAndDirsIfEmpty(fs billy.Filesystem, name string) error {
if err := util.RemoveAll(fs, name); err != nil {
return err
}
dir := filepath.Dir(name)
- return doCleanDirectories(fs, dir)
+ for {
+ removed, err := removeDirIfEmpty(fs, dir)
+ if err != nil {
+ return err
+ }
+
+ if !removed {
+ // directory was not empty and not removed,
+ // stop checking parents
+ break
+ }
+
+ // move to parent directory
+ dir = filepath.Dir(dir)
+ }
+
+ return nil
}
-// doCleanDirectories removes empty subdirs (without files)
-func doCleanDirectories(fs billy.Filesystem, dir string) error {
+// removeDirIfEmpty removes the supplied directory `dir` if it is empty.
+// It returns true if the directory was removed.
+func removeDirIfEmpty(fs billy.Filesystem, dir string) (bool, error) {
files, err := fs.ReadDir(dir)
if err != nil {
- return err
+ return false, err
}
- if len(files) == 0 {
- return fs.Remove(dir)
+
+ if len(files) > 0 {
+ return false, nil
}
- return nil
+
+ err = fs.Remove(dir)
+ if err != nil {
+ return false, err
+ }
+
+ return true, nil
}
type indexBuilder struct {
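
With Grep moved onto Repository (Worktree.Grep is now a thin wrapper), grepping also works on bare clones. A hedged usage sketch; the clone target and local path are placeholders:

package main

import (
	"fmt"
	"regexp"

	git "github.com/go-git/go-git/v5"
)

func main() {
	// A bare clone has no worktree, but Repository.Grep only needs trees.
	repo, err := git.PlainClone("/tmp/example-bare", true, &git.CloneOptions{
		URL: "https://github.com/go-git/go-git", // placeholder repository
	})
	if err != nil {
		fmt.Println(err)
		return
	}

	results, err := repo.Grep(&git.GrepOptions{
		Patterns: []*regexp.Regexp{regexp.MustCompile("import")},
	})
	if err != nil {
		fmt.Println(err)
		return
	}
	for _, r := range results {
		fmt.Println(r) // GrepResult.String(): tree:file:line:content
	}
}
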
diff --git a/worktree_bsd.go b/worktree_bsd.go
index d4ea327..d4682eb 100644
--- a/worktree_bsd.go
+++ b/worktree_bsd.go
@@ -12,7 +12,7 @@ import (
func init() {
fillSystemInfo = func(e *index.Entry, sys interface{}) {
if os, ok := sys.(*syscall.Stat_t); ok {
- e.CreatedAt = time.Unix(int64(os.Atimespec.Sec), int64(os.Atimespec.Nsec))
+ e.CreatedAt = time.Unix(os.Atimespec.Unix())
e.Dev = uint32(os.Dev)
e.Inode = uint32(os.Ino)
e.GID = os.Gid
diff --git a/worktree_commit.go b/worktree_commit.go
index 86320d8..eaa21c3 100644
--- a/worktree_commit.go
+++ b/worktree_commit.go
@@ -2,6 +2,7 @@ package git
import (
"bytes"
+ "errors"
"path"
"sort"
"strings"
@@ -16,6 +17,12 @@ import (
"github.com/go-git/go-billy/v5"
)
+var (
+ // ErrEmptyCommit occurs when a commit is attempted using a clean
+ // working tree, with no changes to be committed.
+ ErrEmptyCommit = errors.New("cannot create empty commit: clean working tree")
+)
+
// Commit stores the current contents of the index in a new commit along with
// a log message from the user describing the changes.
func (w *Worktree) Commit(msg string, opts *CommitOptions) (plumbing.Hash, error) {
@@ -37,14 +44,14 @@ func (w *Worktree) Commit(msg string, opts *CommitOptions) (plumbing.Hash, error
return plumbing.ZeroHash, err
}
- t, err := w.getTreeFromCommitHash(head.Hash())
+ t, err := w.r.getTreeFromCommitHash(head.Hash())
if err != nil {
return plumbing.ZeroHash, err
}
treeHash = t.Hash
opts.Parents = []plumbing.Hash{head.Hash()}
- }else {
+ } else {
idx, err := w.r.Storer.Index()
if err != nil {
return plumbing.ZeroHash, err
@@ -55,7 +62,7 @@ func (w *Worktree) Commit(msg string, opts *CommitOptions) (plumbing.Hash, error
s: w.r.Storer,
}
- treeHash, err = h.BuildTree(idx)
+ treeHash, err = h.BuildTree(idx, opts)
if err != nil {
return plumbing.ZeroHash, err
}
@@ -162,7 +169,11 @@ type buildTreeHelper struct {
// BuildTree builds the tree objects and push its to the storer, the hash
// of the root tree is returned.
-func (h *buildTreeHelper) BuildTree(idx *index.Index) (plumbing.Hash, error) {
+func (h *buildTreeHelper) BuildTree(idx *index.Index, opts *CommitOptions) (plumbing.Hash, error) {
+ if len(idx.Entries) == 0 && (opts == nil || !opts.AllowEmptyCommits) {
+ return plumbing.ZeroHash, ErrEmptyCommit
+ }
+
const rootNode = ""
h.trees = map[string]*object.Tree{rootNode: {}}
h.entries = map[string]*object.TreeEntry{}
@@ -252,4 +263,4 @@ func (h *buildTreeHelper) copyTreeToStorageRecursive(parent string, t *object.Tr
return hash, nil
}
return h.s.SetEncodedObject(o)
-}
+}
\ No newline at end of file
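
With ErrEmptyCommit in place, callers either treat a clean tree as a no-op or opt back in via AllowEmptyCommits. A hedged sketch using an in-memory repository:

package main

import (
	"errors"
	"fmt"
	"time"

	"github.com/go-git/go-billy/v5/memfs"
	git "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing/object"
	"github.com/go-git/go-git/v5/storage/memory"
)

func main() {
	// In-memory repository: storage and worktree both live in RAM.
	repo, _ := git.Init(memory.NewStorage(), memfs.New())
	w, _ := repo.Worktree()

	sig := &object.Signature{Name: "example", Email: "example@example.com", When: time.Now()}

	// Nothing staged: the commit is now rejected instead of silently created.
	_, err := w.Commit("no changes", &git.CommitOptions{Author: sig})
	if errors.Is(err, git.ErrEmptyCommit) {
		fmt.Println("clean working tree, skipping commit")
	}

	// Opting back in to the previous behaviour.
	hash, err := w.Commit("empty on purpose", &git.CommitOptions{Author: sig, AllowEmptyCommits: true})
	fmt.Println(hash, err)
}
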
diff --git a/worktree_commit_test.go b/worktree_commit_test.go
index 547c820..1ac1990 100644
--- a/worktree_commit_test.go
+++ b/worktree_commit_test.go
@@ -26,12 +26,18 @@ import (
)
func (s *WorktreeSuite) TestCommitEmptyOptions(c *C) {
- r, err := Init(memory.NewStorage(), memfs.New())
+ fs := memfs.New()
+ r, err := Init(memory.NewStorage(), fs)
c.Assert(err, IsNil)
w, err := r.Worktree()
c.Assert(err, IsNil)
+ util.WriteFile(fs, "foo", []byte("foo"), 0644)
+
+ _, err = w.Add("foo")
+ c.Assert(err, IsNil)
+
hash, err := w.Commit("foo", &CommitOptions{})
c.Assert(err, IsNil)
c.Assert(hash.IsZero(), Equals, false)
@@ -65,6 +71,24 @@ func (s *WorktreeSuite) TestCommitInitial(c *C) {
assertStorageStatus(c, r, 1, 1, 1, expected)
}
+func (s *WorktreeSuite) TestNothingToCommit(c *C) {
+ expected := plumbing.NewHash("838ea833ce893e8555907e5ef224aa076f5e274a")
+
+ r, err := Init(memory.NewStorage(), memfs.New())
+ c.Assert(err, IsNil)
+
+ w, err := r.Worktree()
+ c.Assert(err, IsNil)
+
+ hash, err := w.Commit("failed empty commit\n", &CommitOptions{Author: defaultSignature()})
+ c.Assert(hash, Equals, plumbing.ZeroHash)
+ c.Assert(err, Equals, ErrEmptyCommit)
+
+ hash, err = w.Commit("enable empty commits\n", &CommitOptions{Author: defaultSignature(), AllowEmptyCommits: true})
+ c.Assert(hash, Equals, expected)
+ c.Assert(err, IsNil)
+}
+
func (s *WorktreeSuite) TestCommitParent(c *C) {
expected := plumbing.NewHash("ef3ca05477530b37f48564be33ddd48063fc7a22")
diff --git a/worktree_linux.go b/worktree_linux.go
index cf0db25..6fcace2 100644
--- a/worktree_linux.go
+++ b/worktree_linux.go
@@ -12,7 +12,7 @@ import (
func init() {
fillSystemInfo = func(e *index.Entry, sys interface{}) {
if os, ok := sys.(*syscall.Stat_t); ok {
- e.CreatedAt = time.Unix(int64(os.Ctim.Sec), int64(os.Ctim.Nsec))
+ e.CreatedAt = time.Unix(os.Ctim.Unix())
e.Dev = uint32(os.Dev)
e.Inode = uint32(os.Ino)
e.GID = os.Gid
diff --git a/worktree_status.go b/worktree_status.go
index c639f13..61bb6f7 100644
--- a/worktree_status.go
+++ b/worktree_status.go
@@ -169,7 +169,9 @@ func (w *Worktree) excludeIgnoredChanges(changes merkletrie.Changes) merkletrie.
if len(path) != 0 {
isDir := (len(ch.To) > 0 && ch.To.IsDir()) || (len(ch.From) > 0 && ch.From.IsDir())
if m.Match(path, isDir) {
- continue
+ if len(ch.From) == 0 {
+ continue
+ }
}
}
res = append(res, ch)
@@ -270,10 +272,6 @@ func (w *Worktree) Add(path string) (plumbing.Hash, error) {
}
func (w *Worktree) doAddDirectory(idx *index.Index, s Status, directory string, ignorePattern []gitignore.Pattern) (added bool, err error) {
- files, err := w.Filesystem.ReadDir(directory)
- if err != nil {
- return false, err
- }
if len(ignorePattern) > 0 {
m := gitignore.NewMatcher(ignorePattern)
matchPath := strings.Split(directory, string(os.PathSeparator))
@@ -283,32 +281,29 @@ func (w *Worktree) doAddDirectory(idx *index.Index, s Status, directory string,
}
}
- for _, file := range files {
- name := path.Join(directory, file.Name())
+ directory = filepath.ToSlash(filepath.Clean(directory))
- var a bool
- if file.IsDir() {
- if file.Name() == GitDirName {
- // ignore special git directory
- continue
- }
- a, err = w.doAddDirectory(idx, s, name, ignorePattern)
- } else {
- a, _, err = w.doAddFile(idx, s, name, ignorePattern)
+ for name := range s {
+ if !isPathInDirectory(name, directory) {
+ continue
}
+ var a bool
+ a, _, err = w.doAddFile(idx, s, name, ignorePattern)
if err != nil {
return
}
- if !added && a {
- added = true
- }
+ added = added || a
}
return
}
+func isPathInDirectory(path, directory string) bool {
+ return directory == "." || strings.HasPrefix(path, directory+"/")
+}
+
// AddWithOptions file contents to the index, updates the index using the
// current content found in the working tree, to prepare the content staged for
// the next commit.
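
doAddDirectory now iterates the computed Status instead of re-walking the filesystem, so files deleted from a directory are staged too; the path filter is a plain prefix check. A tiny sketch of that check and the directory normalisation that precedes it:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// isPathInDirectory mirrors the helper above: "." matches everything,
// otherwise the status path must live under directory.
func isPathInDirectory(path, directory string) bool {
	return directory == "." || strings.HasPrefix(path, directory+"/")
}

func main() {
	// Directories are cleaned and slash-normalised before the check,
	// which is why "go/" and "go" behave the same.
	for _, dir := range []string{"go/", "go", "."} {
		d := filepath.ToSlash(filepath.Clean(dir))
		fmt.Println(dir, "->",
			isPathInDirectory("go/example.go", d),
			isPathInDirectory("json/short.json", d))
	}
}
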
diff --git a/worktree_test.go b/worktree_test.go
index 4a14126..24d5bd5 100644
--- a/worktree_test.go
+++ b/worktree_test.go
@@ -5,7 +5,6 @@ import (
"context"
"errors"
"io"
- "io/ioutil"
"os"
"path/filepath"
"regexp"
@@ -81,7 +80,7 @@ func (s *WorktreeSuite) TestPullFastForward(c *C) {
w, err := server.Worktree()
c.Assert(err, IsNil)
- err = ioutil.WriteFile(filepath.Join(path, "foo"), []byte("foo"), 0755)
+ err = os.WriteFile(filepath.Join(path, "foo"), []byte("foo"), 0755)
c.Assert(err, IsNil)
hash, err := w.Commit("foo", &CommitOptions{Author: defaultSignature()})
c.Assert(err, IsNil)
@@ -118,14 +117,14 @@ func (s *WorktreeSuite) TestPullNonFastForward(c *C) {
w, err := server.Worktree()
c.Assert(err, IsNil)
- err = ioutil.WriteFile(filepath.Join(path, "foo"), []byte("foo"), 0755)
+ err = os.WriteFile(filepath.Join(path, "foo"), []byte("foo"), 0755)
c.Assert(err, IsNil)
_, err = w.Commit("foo", &CommitOptions{Author: defaultSignature()})
c.Assert(err, IsNil)
w, err = r.Worktree()
c.Assert(err, IsNil)
- err = ioutil.WriteFile(filepath.Join(path, "bar"), []byte("bar"), 0755)
+ err = os.WriteFile(filepath.Join(path, "bar"), []byte("bar"), 0755)
c.Assert(err, IsNil)
_, err = w.Commit("bar", &CommitOptions{Author: defaultSignature()})
c.Assert(err, IsNil)
@@ -259,7 +258,7 @@ func (s *RepositorySuite) TestPullAdd(c *C) {
ExecuteOnPath(c, path,
"touch foo",
"git add foo",
- "git commit -m foo foo",
+ "git commit --no-gpg-sign -m foo foo",
)
w, err := r.Worktree()
@@ -287,7 +286,7 @@ func (s *WorktreeSuite) TestPullAlreadyUptodate(c *C) {
w, err := r.Worktree()
c.Assert(err, IsNil)
- err = ioutil.WriteFile(filepath.Join(path, "bar"), []byte("bar"), 0755)
+ err = os.WriteFile(filepath.Join(path, "bar"), []byte("bar"), 0755)
c.Assert(err, IsNil)
_, err = w.Commit("bar", &CommitOptions{Author: defaultSignature()})
c.Assert(err, IsNil)
@@ -315,7 +314,7 @@ func (s *WorktreeSuite) TestCheckout(c *C) {
ch, err := fs.Open("CHANGELOG")
c.Assert(err, IsNil)
- content, err := ioutil.ReadAll(ch)
+ content, err := io.ReadAll(ch)
c.Assert(err, IsNil)
c.Assert(string(content), Equals, "Initial changelog\n")
@@ -871,6 +870,51 @@ func (s *WorktreeSuite) TestStatusEmpty(c *C) {
c.Assert(status, NotNil)
}
+func (s *WorktreeSuite) TestStatusCheckedInBeforeIgnored(c *C) {
+ fs := memfs.New()
+ storage := memory.NewStorage()
+
+ r, err := Init(storage, fs)
+ c.Assert(err, IsNil)
+
+ w, err := r.Worktree()
+ c.Assert(err, IsNil)
+
+ err = util.WriteFile(fs, "fileToIgnore", []byte("Initial data"), 0755)
+ c.Assert(err, IsNil)
+ _, err = w.Add("fileToIgnore")
+ c.Assert(err, IsNil)
+ _, err = w.Commit("Added file that will be ignored later", &CommitOptions{})
+ c.Assert(err, IsNil)
+
+ err = util.WriteFile(fs, ".gitignore", []byte("fileToIgnore\nsecondIgnoredFile"), 0755)
+ c.Assert(err, IsNil)
+ _, err = w.Add(".gitignore")
+ c.Assert(err, IsNil)
+ _, err = w.Commit("Added .gitignore", &CommitOptions{})
+ c.Assert(err, IsNil)
+ status, err := w.Status()
+ c.Assert(err, IsNil)
+ c.Assert(status.IsClean(), Equals, true)
+ c.Assert(status, NotNil)
+
+ err = util.WriteFile(fs, "secondIgnoredFile", []byte("Should be completely ignored"), 0755)
+ c.Assert(err, IsNil)
+ status = nil
+ status, err = w.Status()
+ c.Assert(err, IsNil)
+ c.Assert(status.IsClean(), Equals, true)
+ c.Assert(status, NotNil)
+
+ err = util.WriteFile(fs, "fileToIgnore", []byte("Updated data"), 0755)
+ c.Assert(err, IsNil)
+ status = nil
+ status, err = w.Status()
+ c.Assert(err, IsNil)
+ c.Assert(status.IsClean(), Equals, false)
+ c.Assert(status, NotNil)
+}
+
func (s *WorktreeSuite) TestStatusEmptyDirty(c *C) {
fs := memfs.New()
err := util.WriteFile(fs, "foo", []byte("foo"), 0755)
@@ -1399,6 +1443,141 @@ func (s *WorktreeSuite) TestAddRemoved(c *C) {
c.Assert(file.Staging, Equals, Deleted)
}
+func (s *WorktreeSuite) TestAddRemovedInDirectory(c *C) {
+ fs := memfs.New()
+ w := &Worktree{
+ r: s.Repository,
+ Filesystem: fs,
+ }
+
+ err := w.Checkout(&CheckoutOptions{Force: true})
+ c.Assert(err, IsNil)
+
+ idx, err := w.r.Storer.Index()
+ c.Assert(err, IsNil)
+ c.Assert(idx.Entries, HasLen, 9)
+
+ err = w.Filesystem.Remove("go/example.go")
+ c.Assert(err, IsNil)
+
+ err = w.Filesystem.Remove("json/short.json")
+ c.Assert(err, IsNil)
+
+ hash, err := w.Add("go")
+ c.Assert(err, IsNil)
+ c.Assert(hash.IsZero(), Equals, true)
+
+ e, err := idx.Entry("go/example.go")
+ c.Assert(err, IsNil)
+ c.Assert(e.Hash, Equals, plumbing.NewHash("880cd14280f4b9b6ed3986d6671f907d7cc2a198"))
+ c.Assert(e.Mode, Equals, filemode.Regular)
+
+ e, err = idx.Entry("json/short.json")
+ c.Assert(err, IsNil)
+ c.Assert(e.Hash, Equals, plumbing.NewHash("c8f1d8c61f9da76f4cb49fd86322b6e685dba956"))
+ c.Assert(e.Mode, Equals, filemode.Regular)
+
+ status, err := w.Status()
+ c.Assert(err, IsNil)
+ c.Assert(status, HasLen, 2)
+
+ file := status.File("go/example.go")
+ c.Assert(file.Staging, Equals, Deleted)
+
+ file = status.File("json/short.json")
+ c.Assert(file.Staging, Equals, Unmodified)
+}
+
+func (s *WorktreeSuite) TestAddRemovedInDirectoryWithTrailingSlash(c *C) {
+ fs := memfs.New()
+ w := &Worktree{
+ r: s.Repository,
+ Filesystem: fs,
+ }
+
+ err := w.Checkout(&CheckoutOptions{Force: true})
+ c.Assert(err, IsNil)
+
+ idx, err := w.r.Storer.Index()
+ c.Assert(err, IsNil)
+ c.Assert(idx.Entries, HasLen, 9)
+
+ err = w.Filesystem.Remove("go/example.go")
+ c.Assert(err, IsNil)
+
+ err = w.Filesystem.Remove("json/short.json")
+ c.Assert(err, IsNil)
+
+ hash, err := w.Add("go/")
+ c.Assert(err, IsNil)
+ c.Assert(hash.IsZero(), Equals, true)
+
+ e, err := idx.Entry("go/example.go")
+ c.Assert(err, IsNil)
+ c.Assert(e.Hash, Equals, plumbing.NewHash("880cd14280f4b9b6ed3986d6671f907d7cc2a198"))
+ c.Assert(e.Mode, Equals, filemode.Regular)
+
+ e, err = idx.Entry("json/short.json")
+ c.Assert(err, IsNil)
+ c.Assert(e.Hash, Equals, plumbing.NewHash("c8f1d8c61f9da76f4cb49fd86322b6e685dba956"))
+ c.Assert(e.Mode, Equals, filemode.Regular)
+
+ status, err := w.Status()
+ c.Assert(err, IsNil)
+ c.Assert(status, HasLen, 2)
+
+ file := status.File("go/example.go")
+ c.Assert(file.Staging, Equals, Deleted)
+
+ file = status.File("json/short.json")
+ c.Assert(file.Staging, Equals, Unmodified)
+}
+
+func (s *WorktreeSuite) TestAddRemovedInDirectoryDot(c *C) {
+ fs := memfs.New()
+ w := &Worktree{
+ r: s.Repository,
+ Filesystem: fs,
+ }
+
+ err := w.Checkout(&CheckoutOptions{Force: true})
+ c.Assert(err, IsNil)
+
+ idx, err := w.r.Storer.Index()
+ c.Assert(err, IsNil)
+ c.Assert(idx.Entries, HasLen, 9)
+
+ err = w.Filesystem.Remove("go/example.go")
+ c.Assert(err, IsNil)
+
+ err = w.Filesystem.Remove("json/short.json")
+ c.Assert(err, IsNil)
+
+ hash, err := w.Add(".")
+ c.Assert(err, IsNil)
+ c.Assert(hash.IsZero(), Equals, true)
+
+ e, err := idx.Entry("go/example.go")
+ c.Assert(err, IsNil)
+ c.Assert(e.Hash, Equals, plumbing.NewHash("880cd14280f4b9b6ed3986d6671f907d7cc2a198"))
+ c.Assert(e.Mode, Equals, filemode.Regular)
+
+ e, err = idx.Entry("json/short.json")
+ c.Assert(err, IsNil)
+ c.Assert(e.Hash, Equals, plumbing.NewHash("c8f1d8c61f9da76f4cb49fd86322b6e685dba956"))
+ c.Assert(e.Mode, Equals, filemode.Regular)
+
+ status, err := w.Status()
+ c.Assert(err, IsNil)
+ c.Assert(status, HasLen, 2)
+
+ file := status.File("go/example.go")
+ c.Assert(file.Staging, Equals, Deleted)
+
+ file = status.File("json/short.json")
+ c.Assert(file.Staging, Equals, Deleted)
+}
+
func (s *WorktreeSuite) TestAddSymlink(c *C) {
dir, clean := s.TemporalDir()
defer clean()
@@ -2166,7 +2345,140 @@ func (s *WorktreeSuite) TestGrep(c *C) {
}
}
+func (s *WorktreeSuite) TestGrepBare(c *C) {
+ cases := []struct {
+ name string
+ options GrepOptions
+ wantResult []GrepResult
+ dontWantResult []GrepResult
+ wantError error
+ }{
+ {
+ name: "basic word match",
+ options: GrepOptions{
+ Patterns: []*regexp.Regexp{regexp.MustCompile("import")},
+ CommitHash: plumbing.ZeroHash,
+ },
+ wantResult: []GrepResult{
+ {
+ FileName: "go/example.go",
+ LineNumber: 3,
+ Content: "import (",
+ TreeName: "6ecf0ef2c2dffb796033e5a02219af86ec6584e5",
+ },
+ {
+ FileName: "vendor/foo.go",
+ LineNumber: 3,
+ Content: "import \"fmt\"",
+ TreeName: "6ecf0ef2c2dffb796033e5a02219af86ec6584e5",
+ },
+ },
+ },
+ }
+
+ path := fixtures.Basic().ByTag("worktree").One().Worktree().Root()
+
+ dir, clean := s.TemporalDir()
+ defer clean()
+
+ r, err := PlainClone(dir, true, &CloneOptions{
+ URL: path,
+ })
+ c.Assert(err, IsNil)
+
+ for _, tc := range cases {
+ gr, err := r.Grep(&tc.options)
+ if tc.wantError != nil {
+ c.Assert(err, Equals, tc.wantError)
+ } else {
+ c.Assert(err, IsNil)
+ }
+
+ // Iterate through the results and check if the wanted result is present
+ // in the got result.
+ for _, wantResult := range tc.wantResult {
+ found := false
+ for _, gotResult := range gr {
+ if wantResult == gotResult {
+ found = true
+ break
+ }
+ }
+ if !found {
+ c.Errorf("unexpected grep results for %q, expected result to contain: %v", tc.name, wantResult)
+ }
+ }
+
+ // Iterate through the results and check if the not wanted result is
+ // present in the got result.
+ for _, dontWantResult := range tc.dontWantResult {
+ found := false
+ for _, gotResult := range gr {
+ if dontWantResult == gotResult {
+ found = true
+ break
+ }
+ }
+ if found {
+ c.Errorf("unexpected grep results for %q, expected result to NOT contain: %v", tc.name, dontWantResult)
+ }
+ }
+ }
+}
+
+func (s *WorktreeSuite) TestResetLingeringDirectories(c *C) {
+ dir, clean := s.TemporalDir()
+ defer clean()
+
+ commitOpts := &CommitOptions{Author: &object.Signature{
+ Name: "foo",
+ Email: "foo@foo.foo",
+ When: time.Now(),
+ }}
+
+ repo, err := PlainInit(dir, false)
+ c.Assert(err, IsNil)
+
+ w, err := repo.Worktree()
+ c.Assert(err, IsNil)
+
+ os.WriteFile(filepath.Join(dir, "README"), []byte("placeholder"), 0o644)
+
+ _, err = w.Add(".")
+ c.Assert(err, IsNil)
+
+ initialHash, err := w.Commit("Initial commit", commitOpts)
+ c.Assert(err, IsNil)
+
+ os.MkdirAll(filepath.Join(dir, "a", "b"), 0o755)
+ os.WriteFile(filepath.Join(dir, "a", "b", "1"), []byte("1"), 0o644)
+
+ _, err = w.Add(".")
+ c.Assert(err, IsNil)
+
+ _, err = w.Commit("Add file in nested sub-directories", commitOpts)
+ c.Assert(err, IsNil)
+
+ // reset to initial commit, which should remove a/b/1, a/b, and a
+ err = w.Reset(&ResetOptions{
+ Commit: initialHash,
+ Mode: HardReset,
+ })
+ c.Assert(err, IsNil)
+
+ _, err = os.Stat(filepath.Join(dir, "a", "b", "1"))
+ c.Assert(errors.Is(err, os.ErrNotExist), Equals, true)
+
+ _, err = os.Stat(filepath.Join(dir, "a", "b"))
+ c.Assert(errors.Is(err, os.ErrNotExist), Equals, true)
+
+ _, err = os.Stat(filepath.Join(dir, "a"))
+ c.Assert(errors.Is(err, os.ErrNotExist), Equals, true)
+}
+
func (s *WorktreeSuite) TestAddAndCommit(c *C) {
+ expectedFiles := 2
+
dir, clean := s.TemporalDir()
defer clean()
@@ -2176,17 +2488,23 @@ func (s *WorktreeSuite) TestAddAndCommit(c *C) {
w, err := repo.Worktree()
c.Assert(err, IsNil)
+ os.WriteFile(filepath.Join(dir, "foo"), []byte("bar"), 0o644)
+ os.WriteFile(filepath.Join(dir, "bar"), []byte("foo"), 0o644)
+
_, err = w.Add(".")
c.Assert(err, IsNil)
- w.Commit("Test Add And Commit", &CommitOptions{Author: &object.Signature{
+ _, err = w.Commit("Test Add And Commit", &CommitOptions{Author: &object.Signature{
Name: "foo",
Email: "foo@foo.foo",
When: time.Now(),
}})
+ c.Assert(err, IsNil)
iter, err := w.r.Log(&LogOptions{})
c.Assert(err, IsNil)
+
+ filesFound := 0
err = iter.ForEach(func(c *object.Commit) error {
files, err := c.Files()
if err != nil {
@@ -2194,11 +2512,34 @@ func (s *WorktreeSuite) TestAddAndCommit(c *C) {
}
err = files.ForEach(func(f *object.File) error {
- return errors.New("Expected no files, got at least 1")
+ filesFound++
+ return nil
})
return err
})
c.Assert(err, IsNil)
+ c.Assert(filesFound, Equals, expectedFiles)
+}
+
+func (s *WorktreeSuite) TestAddAndCommitEmpty(c *C) {
+ dir, clean := s.TemporalDir()
+ defer clean()
+
+ repo, err := PlainInit(dir, false)
+ c.Assert(err, IsNil)
+
+ w, err := repo.Worktree()
+ c.Assert(err, IsNil)
+
+ _, err = w.Add(".")
+ c.Assert(err, IsNil)
+
+ _, err = w.Commit("Test Add And Commit", &CommitOptions{Author: &object.Signature{
+ Name: "foo",
+ Email: "foo@foo.foo",
+ When: time.Now(),
+ }})
+ c.Assert(err, Equals, ErrEmptyCommit)
}
func (s *WorktreeSuite) TestLinkedWorktree(c *C) {
diff --git a/worktree_unix_other.go b/worktree_unix_other.go
index f45966b..5b16e70 100644
--- a/worktree_unix_other.go
+++ b/worktree_unix_other.go
@@ -12,7 +12,7 @@ import (
func init() {
fillSystemInfo = func(e *index.Entry, sys interface{}) {
if os, ok := sys.(*syscall.Stat_t); ok {
- e.CreatedAt = time.Unix(int64(os.Atim.Sec), int64(os.Atim.Nsec))
+ e.CreatedAt = time.Unix(os.Atim.Unix())
e.Dev = uint32(os.Dev)
e.Inode = uint32(os.Ino)
e.GID = os.Gid